[ 527.631975] env[69027]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69027) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 527.632335] env[69027]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69027) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 527.632449] env[69027]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69027) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 527.632780] env[69027]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 527.718842] env[69027]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69027) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 527.728810] env[69027]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69027) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 527.865242] env[69027]: INFO nova.virt.driver [None req-a7e10942-92de-4b52-af77-5f46f514af89 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 527.937934] env[69027]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 527.938119] env[69027]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 527.938215] env[69027]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69027) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 531.114204] env[69027]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-fa96bdb8-24e0-431b-8d5e-aac104cce925 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.130714] env[69027]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69027) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 531.130913] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-2e5eb527-5eaf-4256-927f-88a58dd38be6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.169385] env[69027]: INFO oslo_vmware.api [-] Successfully established new session; session ID is e0dbe.
[ 531.169551] env[69027]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.231s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 531.170077] env[69027]: INFO nova.virt.vmwareapi.driver [None req-a7e10942-92de-4b52-af77-5f46f514af89 None None] VMware vCenter version: 7.0.3
[ 531.173537] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df892b9f-acf3-4f17-885e-1b9fe08819b7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.194816] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c782101c-fcdf-4feb-a4ff-895bdbcc98cf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.200874] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa720403-b939-4356-b7d0-22f1b467fae8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.207542] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b816aa2b-f04f-45de-baed-b3db57b016aa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.221011] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37edc6a5-7a40-40cb-8b9b-a6ace7001565 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.226839] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9caad9de-2713-4225-ac49-9692f0b997cc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.256618] env[69027]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-fc7045f6-3282-4ddb-80f1-45ffd3adbac2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 531.261580] env[69027]: DEBUG nova.virt.vmwareapi.driver [None req-a7e10942-92de-4b52-af77-5f46f514af89 None None] Extension org.openstack.compute already exists. {{(pid=69027) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 531.264181] env[69027]: INFO nova.compute.provider_config [None req-a7e10942-92de-4b52-af77-5f46f514af89 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 531.283556] env[69027]: DEBUG nova.context [None req-a7e10942-92de-4b52-af77-5f46f514af89 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),d85b5a17-002c-451d-94e7-2f937a92581b(cell1) {{(pid=69027) load_cells /opt/stack/nova/nova/context.py:464}} [ 531.285507] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.285728] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.286422] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.286839] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Acquiring lock "d85b5a17-002c-451d-94e7-2f937a92581b" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.287045] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Lock "d85b5a17-002c-451d-94e7-2f937a92581b" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.288085] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Lock "d85b5a17-002c-451d-94e7-2f937a92581b" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.308238] env[69027]: INFO dbcounter [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Registered counter for database nova_cell0 [ 531.316374] env[69027]: INFO dbcounter [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Registered counter for database nova_cell1 [ 531.319354] env[69027]: DEBUG oslo_db.sqlalchemy.engines [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69027) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 531.319720] env[69027]: DEBUG oslo_db.sqlalchemy.engines [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69027) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 531.324524] env[69027]: DEBUG dbcounter [-] [69027] Writer thread running {{(pid=69027) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}} [ 531.324679] env[69027]: DEBUG dbcounter [-] [69027] Writer thread running {{(pid=69027) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}} [ 531.327465] env[69027]: ERROR nova.db.main.api [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 531.327465] env[69027]: result = function(*args, **kwargs) [ 531.327465] env[69027]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 531.327465] env[69027]: return func(*args, **kwargs) [ 531.327465] env[69027]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 531.327465] env[69027]: result = fn(*args, **kwargs) [ 531.327465] env[69027]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 531.327465] env[69027]: return f(*args, **kwargs) [ 531.327465] env[69027]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version [ 531.327465] env[69027]: return db.service_get_minimum_version(context, binaries) [ 531.327465] env[69027]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 531.327465] env[69027]: _check_db_access() [ 531.327465] env[69027]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 531.327465] env[69027]: stacktrace = ''.join(traceback.format_stack()) [ 531.327465] env[69027]: [ 531.328220] env[69027]: ERROR nova.db.main.api [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 531.328220] env[69027]: result = function(*args, **kwargs) [ 531.328220] env[69027]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 531.328220] env[69027]: return func(*args, **kwargs) [ 531.328220] env[69027]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 531.328220] env[69027]: result = fn(*args, **kwargs) [ 531.328220] env[69027]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 531.328220] env[69027]: return f(*args, **kwargs) [ 531.328220] env[69027]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version [ 531.328220] env[69027]: return db.service_get_minimum_version(context, binaries) [ 531.328220] env[69027]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 531.328220] env[69027]: _check_db_access() [ 531.328220] env[69027]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 531.328220] env[69027]: stacktrace = ''.join(traceback.format_stack()) [ 531.328220] env[69027]: [ 531.328810] env[69027]: WARNING nova.objects.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 531.328810] env[69027]: WARNING nova.objects.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Failed to get minimum service version for cell d85b5a17-002c-451d-94e7-2f937a92581b [ 531.329162] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Acquiring lock "singleton_lock" {{(pid=69027) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.329326] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Acquired lock "singleton_lock" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.329579] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Releasing lock "singleton_lock" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.329893] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Full set of CONF: {{(pid=69027) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 531.330052] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ******************************************************************************** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 531.330187] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] Configuration options gathered from: {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 531.330324] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 531.330514] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 531.330642] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ================================================================================ {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 531.330853] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] allow_resize_to_same_host = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.331036] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] arq_binding_timeout = 300 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.331175] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] backdoor_port = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.331305] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] backdoor_socket = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.331476] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] block_device_allocate_retries = 60 {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.331638] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] block_device_allocate_retries_interval = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.331807] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cert = self.pem {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.331975] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.332159] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute_monitors = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.332331] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] config_dir = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.332504] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] config_drive_format = iso9660 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.332640] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.332805] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] config_source = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.332973] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] console_host = devstack {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.333152] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] control_exchange = nova {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.333311] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cpu_allocation_ratio = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.333471] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] daemon = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.333639] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] debug = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.333794] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] default_access_ip_network_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.333957] 
env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] default_availability_zone = nova {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.334124] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] default_ephemeral_format = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.334285] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] default_green_pool_size = 1000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.334516] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.334679] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] default_schedule_zone = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.334835] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] disk_allocation_ratio = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.334995] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] enable_new_services = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.335422] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] enabled_apis = ['osapi_compute'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.335422] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] enabled_ssl_apis = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.335506] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] flat_injected = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.335654] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] force_config_drive = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.335812] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] force_raw_images = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.335977] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 
None None] graceful_shutdown_timeout = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.336154] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] heal_instance_info_cache_interval = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.336407] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] host = cpu-1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.336593] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.336759] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] initial_disk_allocation_ratio = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.336923] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] initial_ram_allocation_ratio = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.337159] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.337366] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] instance_build_timeout = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.337537] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] instance_delete_interval = 300 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.337707] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] instance_format = [instance: %(uuid)s] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.337875] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] instance_name_template = instance-%08x {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.338049] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] instance_usage_audit = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.338226] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] instance_usage_audit_period = month {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.338394] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.338585] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.338768] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] internal_service_availability_zone = internal {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.338929] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] key = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.339104] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] live_migration_retry_count = 30 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.339273] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_config_append = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.339444] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.339604] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_dir = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.339763] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.339892] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_options = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.340065] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_rotate_interval = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.340239] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_rotate_interval_type = days {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.340409] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] log_rotation_type = none {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.340543] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.340674] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.340845] env[69027]: DEBUG 
oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.341027] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.341154] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.341321] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] long_rpc_timeout = 1800 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.341482] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] max_concurrent_builds = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.341640] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] max_concurrent_live_migrations = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.341799] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] max_concurrent_snapshots = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.341958] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] max_local_block_devices = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.342130] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] max_logfile_count = 30 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.342292] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] max_logfile_size_mb = 200 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.342454] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] maximum_instance_delete_attempts = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.342621] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] metadata_listen = 0.0.0.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.342787] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] metadata_listen_port = 8775 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.342956] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] metadata_workers = 2 {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.343132] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] migrate_max_retries = -1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.343303] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] mkisofs_cmd = genisoimage {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.343512] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] my_block_storage_ip = 10.180.1.21 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.343645] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] my_ip = 10.180.1.21 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.343809] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] network_allocate_retries = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.343989] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.344172] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] osapi_compute_listen = 0.0.0.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.344337] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] osapi_compute_listen_port = 8774 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.344505] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] osapi_compute_unique_server_name_scope = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.344673] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] osapi_compute_workers = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.344833] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] password_length = 12 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.345042] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] periodic_enable = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.345213] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] periodic_fuzzy_delay = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.345387] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] pointer_model = usbtablet {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.345557] env[69027]: 
DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] preallocate_images = none {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.345717] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] publish_errors = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.345847] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] pybasedir = /opt/stack/nova {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.346010] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ram_allocation_ratio = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.346198] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] rate_limit_burst = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.346385] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] rate_limit_except_level = CRITICAL {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.346555] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] rate_limit_interval = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.346718] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] reboot_timeout = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.346878] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] reclaim_instance_interval = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.347046] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] record = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.347255] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] reimage_timeout_per_gb = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.347443] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] report_interval = 120 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.347611] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] rescue_timeout = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.347770] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] reserved_host_cpus = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.347930] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] reserved_host_disk_mb = 0 {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.348102] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] reserved_host_memory_mb = 512 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.348265] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] reserved_huge_pages = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.348427] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] resize_confirm_window = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.348622] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] resize_fs_using_block_device = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.348790] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] resume_guests_state_on_host_boot = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.348963] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.349143] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] rpc_response_timeout = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.349306] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] run_external_periodic_tasks = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.349477] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] running_deleted_instance_action = reap {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.349640] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] running_deleted_instance_poll_interval = 1800 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.349800] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] running_deleted_instance_timeout = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.349959] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler_instance_sync_interval = 120 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.350144] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_down_time = 720 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.350317] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] servicegroup_driver = db {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.350480] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] shelved_offload_time = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.350640] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] shelved_poll_interval = 3600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.350808] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] shutdown_timeout = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.350970] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] source_is_ipv6 = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.351144] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ssl_only = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.351388] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.351557] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] sync_power_state_interval = 600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.351720] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] sync_power_state_pool_size = 1000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.351890] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] syslog_log_facility = LOG_USER {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.352058] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] tempdir = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.352223] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] timeout_nbd = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.352395] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] transport_url = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.352561] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] update_resources_interval = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.352720] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] use_cow_images = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.352880] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 
None None] use_eventlog = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.353048] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] use_journal = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.353213] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] use_json = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.353373] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] use_rootwrap_daemon = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.353531] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] use_stderr = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.353687] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] use_syslog = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.353843] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vcpu_pin_set = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.354016] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plugging_is_fatal = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.354190] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plugging_timeout = 300 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.354357] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] virt_mkfs = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.354520] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] volume_usage_poll_interval = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.354679] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] watch_log_file = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.354848] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] web = /usr/share/spice-html5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 531.355039] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_concurrency.disable_process_locking = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.355337] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.355520] 
env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.355688] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.355861] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.356041] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.356238] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.356439] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.auth_strategy = keystone {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.356614] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.compute_link_prefix = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.356793] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.356967] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.dhcp_domain = novalocal {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.357165] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.enable_instance_password = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.357352] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.glance_link_prefix = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.357526] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.357700] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.357868] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] 
api.instance_list_per_project_cells = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.358045] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.list_records_by_skipping_down_cells = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.358219] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.local_metadata_per_cell = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.358390] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.max_limit = 1000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.358584] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.metadata_cache_expiration = 15 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.358775] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.neutron_default_tenant_id = default {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.358947] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.use_neutron_default_nets = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.359132] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.359298] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.359475] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.359648] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.359815] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.vendordata_dynamic_targets = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.359982] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.vendordata_jsonfile_path = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.360414] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.360414] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 
None None] cache.backend = dogpile.cache.memcached {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.360527] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.backend_argument = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.360698] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.config_prefix = cache.oslo {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.360871] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.dead_timeout = 60.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.361049] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.debug_cache_backend = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.361219] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.enable_retry_client = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.361381] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.enable_socket_keepalive = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.361553] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.enabled = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.361715] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.enforce_fips_mode = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.361881] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.expiration_time = 600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.362058] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.hashclient_retry_attempts = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.362230] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.hashclient_retry_delay = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.362399] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_dead_retry = 300 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.362559] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_password = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.362724] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69027) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.362888] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.363063] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_pool_maxsize = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.363252] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.363389] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_sasl_enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.363569] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.363740] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_socket_timeout = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.363905] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.memcache_username = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.364084] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.proxies = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.364273] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.redis_password = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.364424] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.redis_sentinel_service_name = mymaster {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.364598] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.364768] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.redis_server = localhost:6379 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.364934] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.redis_socket_timeout = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.365107] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.redis_username = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.365274] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.retry_attempts = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.365445] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.retry_delay = 0.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.365604] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.socket_keepalive_count = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.365768] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.socket_keepalive_idle = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.365930] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.socket_keepalive_interval = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.366101] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.tls_allowed_ciphers = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.366289] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.tls_cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.366464] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.tls_certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.366630] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.tls_enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.366791] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cache.tls_keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.366963] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.367162] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.auth_type = password {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.367371] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.367564] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.catalog_info = volumev3::publicURL {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.367732] env[69027]: DEBUG oslo_service.service 
[None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.367898] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.368077] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.cross_az_attach = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.368245] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.debug = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.368412] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.endpoint_template = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.368609] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.http_retries = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.368786] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.368948] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.369138] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.os_region_name = RegionOne {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.369308] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.369470] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cinder.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.369645] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.369807] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.cpu_dedicated_set = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.369967] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.cpu_shared_set = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.370149] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.image_type_exclude_list = [] {{(pid=69027) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.370315] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.370484] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.max_concurrent_disk_ops = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.370646] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.max_disk_devices_to_attach = -1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.370810] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.370980] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.371158] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.resource_provider_association_refresh = 300 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.371325] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.shutdown_retry_interval = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.371511] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.371690] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] conductor.workers = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.371868] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] console.allowed_origins = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.372042] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] console.ssl_ciphers = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.372220] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] console.ssl_minimum_version = default {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.372395] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] consoleauth.enforce_session_timeout = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.372565] env[69027]: DEBUG oslo_service.service [None 
req-f897e691-b654-4301-a74d-2172028e58a5 None None] consoleauth.token_ttl = 600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.372735] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.372894] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.373071] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.373235] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.connect_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.373401] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.connect_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.373558] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.endpoint_override = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.373720] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.373879] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.374049] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.max_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.374214] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.min_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.374373] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.region_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.374529] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.retriable_status_codes = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.374685] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.service_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.374853] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.service_type = accelerator {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.375023] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.375186] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.status_code_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.375347] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.status_code_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.375503] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.375681] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.375843] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] cyborg.version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.376036] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.backend = sqlalchemy {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.376233] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.connection = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.376422] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.connection_debug = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.376599] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.connection_parameters = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.376765] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.connection_recycle_time = 3600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.376931] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.connection_trace = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.377111] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.db_inc_retry_interval = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.377318] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.db_max_retries = 20 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
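Every "<group>.<option> = <value>" record in this dump is emitted by oslo.config's log_opt_values() helper (the log_opt_values ... cfg.py:2620 reference in each record above): at service startup it walks the registered option groups and logs each effective value at DEBUG, masking secret options such as database.connection or cache.backend_argument as ****. Purely as an illustration of that mechanism -- the group and option registrations below are a tiny assumed subset for demonstration, not Nova's actual registration code -- a service produces this kind of output roughly like so (requires oslo.config):

    import logging

    from oslo_config import cfg

    CONF = cfg.CONF

    # Register a group and a couple of options, analogous to how Nova
    # registers its [cache], [database], [glance], ... option groups.
    cache_group = cfg.OptGroup('cache')
    cache_opts = [
        cfg.StrOpt('backend', default='dogpile.cache.memcached'),
        cfg.IntOpt('expiration_time', default=600),
    ]
    CONF.register_group(cache_group)
    CONF.register_opts(cache_opts, group=cache_group)

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    # Parse an (empty) command line so option values can be read, then dump
    # every registered option at DEBUG level; secret opts are shown as ****.
    CONF([], project='demo')
    CONF.log_opt_values(LOG, logging.DEBUG)
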
[ 531.377500] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.db_max_retry_interval = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.377666] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.db_retry_interval = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.377829] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.max_overflow = 50 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.377991] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.max_pool_size = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.378171] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.max_retries = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.378347] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.378523] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.mysql_wsrep_sync_wait = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.378701] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.pool_timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.378868] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.retry_interval = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.379036] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.slave_connection = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.379206] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.sqlite_synchronous = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.379408] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] database.use_db_reconnect = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.379597] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.backend = sqlalchemy {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.379768] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.connection = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.379936] env[69027]: DEBUG oslo_service.service [None 
req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.connection_debug = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.380120] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.connection_parameters = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.380290] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.connection_recycle_time = 3600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.380456] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.connection_trace = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.380618] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.db_inc_retry_interval = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.380780] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.db_max_retries = 20 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.380942] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.db_max_retry_interval = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.381118] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.db_retry_interval = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.381283] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.max_overflow = 50 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.381450] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.max_pool_size = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.381611] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.max_retries = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.381778] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.381938] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.382108] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.pool_timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.382274] env[69027]: DEBUG oslo_service.service [None 
req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.retry_interval = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.382436] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.slave_connection = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.383955] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] api_database.sqlite_synchronous = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.383955] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] devices.enabled_mdev_types = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.383955] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.383955] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ephemeral_storage_encryption.default_format = luks {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.383955] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ephemeral_storage_encryption.enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.383955] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384156] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.api_servers = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384156] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384156] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384156] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384264] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.connect_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384392] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.connect_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384566] env[69027]: DEBUG oslo_service.service [None 
req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.debug = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384725] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.default_trusted_certificate_ids = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.384887] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.enable_certificate_validation = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.385061] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.enable_rbd_download = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.385224] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.endpoint_override = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.385392] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.385554] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.385712] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.max_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.385869] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.min_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.386042] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.num_retries = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.386247] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.rbd_ceph_conf = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.386430] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.rbd_connect_timeout = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.386606] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.rbd_pool = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.386776] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.rbd_user = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.386936] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.region_name = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.387112] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.retriable_status_codes = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.387320] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.service_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.387512] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.service_type = image {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.387682] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.387845] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.status_code_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.388014] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.status_code_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.388181] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.388369] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.388558] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.verify_glance_signatures = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.388737] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] glance.version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.388912] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] guestfs.debug = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.389097] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] mks.enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.389461] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.389656] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] image_cache.manager_interval = 2400 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
531.389828] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] image_cache.precache_concurrency = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.390012] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] image_cache.remove_unused_base_images = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.390192] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.390366] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.390544] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] image_cache.subdirectory_name = _base {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.390723] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.api_max_retries = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.390891] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.api_retry_interval = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.391071] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.391242] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.auth_type = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.391450] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.391668] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.391787] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.391954] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.conductor_group = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.392129] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.connect_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.392294] env[69027]: DEBUG 
oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.connect_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.392457] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.endpoint_override = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.392622] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.392782] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.392942] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.max_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.393113] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.min_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.393281] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.peer_list = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.393443] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.region_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.393603] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.retriable_status_codes = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.393766] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.serial_console_state_timeout = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.393925] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.service_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.394109] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.service_type = baremetal {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.394274] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.shard = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.394438] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.394598] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.status_code_retries = None {{(pid=69027) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.394752] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.status_code_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.394907] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.395098] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.395264] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ironic.version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.395452] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.395628] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] key_manager.fixed_key = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.395812] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.395975] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.barbican_api_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.396149] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.barbican_endpoint = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.396358] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.barbican_endpoint_type = public {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.396528] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.barbican_region_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.396690] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.396851] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.397026] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.collect_timing = False {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.397211] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.397407] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.397580] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.number_of_retries = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.397745] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.retry_delay = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.397910] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.send_service_user_token = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.398084] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.398247] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.398411] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.verify_ssl = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.398570] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican.verify_ssl_path = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.398735] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.398900] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.auth_type = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.399071] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.399235] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.399403] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.collect_timing = False {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.399875] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.399875] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.399875] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.400044] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] barbican_service_user.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.400207] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.approle_role_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.400368] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.approle_secret_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.400527] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.400685] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.400850] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.401020] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.401183] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.401357] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.kv_mountpoint = secret {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.401516] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.kv_path = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.401678] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.kv_version = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.401836] env[69027]: DEBUG 
oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.namespace = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.401993] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.root_token_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.402170] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.402331] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.ssl_ca_crt_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.402488] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.402646] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.use_ssl = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.402818] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.402985] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.403163] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.auth_type = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.403332] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.403521] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.403692] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.403855] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.connect_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.404024] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.connect_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.404192] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.endpoint_override = None {{(pid=69027) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.404357] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.404514] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.404670] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.max_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.404826] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.min_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.404983] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.region_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.405156] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.retriable_status_codes = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.405316] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.service_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.405488] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.service_type = identity {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.405650] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.405808] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.status_code_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.405966] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.status_code_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.406136] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.406351] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.406522] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] keystone.version = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.406726] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.connection_uri = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.406890] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.cpu_mode = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.407068] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.cpu_model_extra_flags = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.407271] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.cpu_models = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.407473] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.cpu_power_governor_high = performance {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.407649] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.cpu_power_governor_low = powersave {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.407816] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.cpu_power_management = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.407987] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.408168] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.device_detach_attempts = 8 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.408334] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.device_detach_timeout = 20 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.408499] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.disk_cachemodes = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.408658] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.disk_prefix = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.408821] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.enabled_perf_events = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.408984] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.file_backed_memory = 0 {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.409162] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.gid_maps = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.409323] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.hw_disk_discard = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.409483] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.hw_machine_type = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.409655] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.images_rbd_ceph_conf = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.409822] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.409987] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.410173] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.images_rbd_glance_store_name = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.410345] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.images_rbd_pool = rbd {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.410517] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.images_type = default {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.410676] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.images_volume_group = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.410839] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.inject_key = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.411009] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.inject_partition = -2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.411181] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.inject_password = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.411347] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.iscsi_iface = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.411510] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.iser_use_multipath = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.411674] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_bandwidth = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.411837] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.411999] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_downtime = 500 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.412176] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.412341] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.412501] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_inbound_addr = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.412661] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.412823] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_permit_post_copy = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.412981] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_scheme = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.413172] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_timeout_action = abort {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.413340] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_tunnelled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.413502] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.live_migration_uri = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.413665] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.413825] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.max_queues = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.413986] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.414229] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.414396] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.nfs_mount_options = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.414691] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.414866] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.415045] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.num_iser_scan_tries = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.415214] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.num_memory_encrypted_guests = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.415401] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.415598] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.num_pcie_ports = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.415773] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.num_volume_scan_tries = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.415941] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.pmem_namespaces = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.416118] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.quobyte_client_cfg = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.416432] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.416614] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rbd_connect_timeout = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.416783] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.416949] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.417131] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rbd_secret_uuid = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.417313] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rbd_user = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.417494] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.417670] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.remote_filesystem_transport = ssh {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.417830] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rescue_image_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.417988] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rescue_kernel_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.418161] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rescue_ramdisk_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.418332] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.418495] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.rx_queue_size = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.418664] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.smbfs_mount_options = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.418934] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.419121] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.snapshot_compression = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.419289] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.snapshot_image_format = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.419509] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.419676] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.sparse_logical_volumes = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.419841] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.swtpm_enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.420021] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.swtpm_group = tss {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.420198] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.swtpm_user = tss {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.420371] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.sysinfo_serial = unique {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.420529] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.tb_cache_size = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.420686] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.tx_queue_size = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.420851] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.uid_maps = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.421021] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.use_virtio_for_bridges = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.421198] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.virt_type = kvm {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.421370] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.volume_clear = zero 
{{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.421534] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.volume_clear_size = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.421702] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.volume_use_multipath = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.421862] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.vzstorage_cache_path = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.422041] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.422216] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.vzstorage_mount_group = qemu {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.422384] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.vzstorage_mount_opts = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.422554] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.422822] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.423011] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.vzstorage_mount_user = stack {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.423186] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.423363] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.423540] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.auth_type = password {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.423702] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.423864] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.certfile = None 
{{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.424039] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.424207] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.connect_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.424370] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.connect_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.424540] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.default_floating_pool = public {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.424700] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.endpoint_override = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.424862] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.extension_sync_interval = 600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.425033] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.http_retries = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.425199] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.425360] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.425519] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.max_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.425687] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.425847] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.min_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.426028] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.ovs_bridge = br-int {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.426205] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.physnets = [] {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.426388] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.region_name = RegionOne {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.426550] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.retriable_status_codes = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.426721] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.service_metadata_proxy = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.426884] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.service_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.427065] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.service_type = network {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.427283] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.427495] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.status_code_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.427667] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.status_code_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.427829] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.428026] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.428196] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] neutron.version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.428372] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] notifications.bdms_in_notifications = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.428553] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] notifications.default_level = INFO {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.428730] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] notifications.notification_format = unversioned {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.428899] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] notifications.notify_on_state_change = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.429088] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.429270] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] pci.alias = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.429443] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] pci.device_spec = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.429609] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] pci.report_in_placement = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.429780] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.429955] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.auth_type = password {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.430140] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.430306] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.430468] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.430633] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.430792] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.connect_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.430953] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.connect_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.431137] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.default_domain_id = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.431300] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.default_domain_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.431459] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.domain_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.431617] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.domain_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.431775] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.endpoint_override = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.431936] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.432107] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.432268] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.max_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.432427] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.min_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.432598] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.password = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.432759] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.project_domain_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.432929] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.project_domain_name = Default {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.433115] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.project_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.433298] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.project_name = service {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.433474] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.region_name = RegionOne {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.433637] 
env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.retriable_status_codes = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.433797] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.service_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.433970] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.service_type = placement {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.434151] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.434314] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.status_code_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.434478] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.status_code_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.434639] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.system_scope = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.434797] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.434954] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.trust_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.435123] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.user_domain_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.435296] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.user_domain_name = Default {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.435458] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.user_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.435630] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.username = placement {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.435810] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.435971] env[69027]: DEBUG oslo_service.service [None 
req-f897e691-b654-4301-a74d-2172028e58a5 None None] placement.version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.436171] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.cores = 20 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.436364] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.count_usage_from_placement = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.436541] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.436715] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.injected_file_content_bytes = 10240 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.436881] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.injected_file_path_length = 255 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.437060] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.injected_files = 5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.437245] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.instances = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.437440] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.key_pairs = 100 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.437615] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.metadata_items = 128 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.437781] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.ram = 51200 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.437947] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.recheck_quota = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.438131] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.server_group_members = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.438302] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] quota.server_groups = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.438480] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.438645] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.438809] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.image_metadata_prefilter = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.438971] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.439150] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.max_attempts = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.439317] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.max_placement_results = 1000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.439483] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.439647] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.query_placement_for_image_type_support = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.439811] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.440054] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] scheduler.workers = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.440255] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.440439] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.440620] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.440792] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.440960] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.441140] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.441309] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.441500] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.441671] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.host_subset_size = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.441838] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.441999] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.442179] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.442351] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.isolated_hosts = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.442517] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.isolated_images = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.442680] env[69027]: DEBUG oslo_service.service [None 
req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.442841] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.443024] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.443198] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.pci_in_placement = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.443370] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.443535] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.443700] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.443863] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.444042] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.444206] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.444377] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.track_instance_changes = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.444556] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.444728] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] metrics.required = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.444894] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] metrics.weight_multiplier = 1.0 
{{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.445068] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.445239] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] metrics.weight_setting = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.445554] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.445734] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] serial_console.enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.445914] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] serial_console.port_range = 10000:20000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.446099] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.446301] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.446483] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] serial_console.serialproxy_port = 6083 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.446674] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.446866] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.auth_type = password {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.447044] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.447211] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.447402] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.447573] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.insecure = False {{(pid=69027) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.447735] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.447907] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.send_service_user_token = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.448085] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.448249] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] service_user.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.448424] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.agent_enabled = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.448604] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.448918] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.449123] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.449300] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.html5proxy_port = 6082 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.449467] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.image_compression = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.449642] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.jpeg_compression = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.449829] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.playback_compression = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.450022] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.server_listen = 127.0.0.1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.450196] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.450360] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.streaming_mode = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.450518] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] spice.zlib_compression = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.450685] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] upgrade_levels.baseapi = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.450859] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] upgrade_levels.compute = auto {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.451028] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] upgrade_levels.conductor = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.451193] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] upgrade_levels.scheduler = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.451362] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.451523] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.auth_type = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.451682] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.451841] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.452010] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.452179] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.452340] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.keyfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.452504] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.452671] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vendordata_dynamic_auth.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.452864] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.api_retry_count = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.453037] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.ca_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.453216] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.cache_prefix = devstack-image-cache {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.453390] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.cluster_name = testcl1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.453558] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.connection_pool_size = 10 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.453716] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.console_delay_seconds = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.453885] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.datastore_regex = ^datastore.* {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.454133] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.454278] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.host_password = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.454446] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.host_port = 443 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.454616] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.host_username = administrator@vsphere.local {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.454786] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.insecure = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.454950] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.integration_bridge = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.455129] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.maximum_objects = 100 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.455291] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.pbm_default_policy = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.455456] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.pbm_enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.455615] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.pbm_wsdl_location = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.455807] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.455974] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.serial_port_proxy_uri = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.456150] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.serial_port_service_uri = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.456343] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.task_poll_interval = 0.5 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.456523] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.use_linked_clone = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.456696] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.vnc_keymap = en-us {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.456863] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.vnc_port = 5900 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.457036] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vmware.vnc_port_total = 10000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.457229] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.auth_schemes = ['none'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.457437] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.457740] env[69027]: 
DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.457932] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.458119] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.novncproxy_port = 6080 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.458304] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.server_listen = 127.0.0.1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.458486] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.458681] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.vencrypt_ca_certs = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.458848] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.vencrypt_client_cert = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.459014] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vnc.vencrypt_client_key = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.459203] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.459372] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.disable_deep_image_inspection = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.459536] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.459697] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.459858] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.460028] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.disable_rootwrap = False {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.460195] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.enable_numa_live_migration = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.460358] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.460518] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.460678] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.460837] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.libvirt_disable_apic = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.460995] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.461170] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.461334] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.461495] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.461681] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.461848] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.462023] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.462187] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
531.462349] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.462512] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.462695] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.462864] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.client_socket_timeout = 900 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.463045] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.default_pool_size = 1000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.463216] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.keep_alive = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.463385] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.max_header_line = 16384 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.463547] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.secure_proxy_ssl_header = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.463706] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.ssl_ca_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.463863] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.ssl_cert_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.464032] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.ssl_key_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.464265] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.tcp_keepidle = 600 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.464461] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.464634] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] zvm.ca_file = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.464796] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] zvm.cloud_connector_url = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.465112] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.465300] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] zvm.reachable_timeout = 300 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.465487] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.enforce_new_defaults = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.465661] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.enforce_scope = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.465838] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.policy_default_rule = default {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.466022] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.466227] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.policy_file = policy.yaml {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.466409] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.466573] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.466731] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.466888] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.467066] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.467278] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.467477] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.467662] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.connection_string = messaging:// {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.467828] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.enabled = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.467997] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.es_doc_type = notification {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.468177] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.es_scroll_size = 10000 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.468347] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.es_scroll_time = 2m {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.468508] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.filter_error_trace = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.468675] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.hmac_keys = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.468841] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.sentinel_service_name = mymaster {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.469012] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.socket_timeout = 0.1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.469189] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.trace_requests = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.469350] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler.trace_sqlalchemy = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.469524] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler_jaeger.process_tags = {} {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.469682] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.469844] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] profiler_otlp.service_name_prefix = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.470018] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] remote_debug.host = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.470201] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] remote_debug.port = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.470396] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.470561] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.470724] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.470885] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.471062] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.471225] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.471389] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.471549] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.471710] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.471879] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.472046] env[69027]: 
DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.472225] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.472396] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.472567] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.472737] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.472905] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.473080] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.473284] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.473458] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.473622] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.473786] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.473951] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.474129] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.474318] env[69027]: DEBUG oslo_service.service [None 
req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.474458] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.474624] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.474785] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.474947] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.475129] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.475302] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.ssl = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.475473] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.475642] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.475804] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.475972] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.476167] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.ssl_version = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.476359] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.476557] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.476726] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_notifications.retry = -1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.476909] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.477097] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_messaging_notifications.transport_url = **** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.477283] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.auth_section = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.477464] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.auth_type = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.477627] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.cafile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.477787] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.certfile = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.477949] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.collect_timing = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.478121] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.connect_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.478280] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.connect_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.478439] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.endpoint_id = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.478627] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.endpoint_override = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.478798] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.insecure = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.478957] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.keyfile = None {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.479128] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.max_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.479286] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.min_version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.479443] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.region_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.479599] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.retriable_status_codes = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.479754] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.service_name = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.479911] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.service_type = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.480081] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.split_loggers = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.480242] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.status_code_retries = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.480400] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.status_code_retry_delay = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.480555] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.timeout = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.480709] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.valid_interfaces = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.480864] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_limit.version = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.481039] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_reports.file_event_handler = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.481207] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69027) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.481366] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] oslo_reports.log_dir = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.481546] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.481724] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.481884] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.482061] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.482230] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.482394] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.482563] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.482721] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_ovs_privileged.group = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.482878] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.483052] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.483220] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.483379] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] vif_plug_ovs_privileged.user = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.483548] env[69027]: DEBUG oslo_service.service 
[None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.flat_interface = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.483723] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.483895] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.484082] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.484260] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.484430] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.484613] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.484788] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.484968] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.485154] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_ovs.isolate_vif = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.485328] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.485494] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.485661] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.485830] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_ovs.ovsdb_interface = native {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 531.485991] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_vif_ovs.per_port_bridge = False {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.486201] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_brick.lock_path = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.486376] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.486547] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.486717] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] privsep_osbrick.capabilities = [21] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.486877] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] privsep_osbrick.group = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.487043] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] privsep_osbrick.helper_command = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.487213] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.487397] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.487562] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] privsep_osbrick.user = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.487765] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.487930] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] nova_sys_admin.group = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.488100] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] nova_sys_admin.helper_command = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.488268] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
531.488434] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.488590] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] nova_sys_admin.user = None {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 531.488720] env[69027]: DEBUG oslo_service.service [None req-f897e691-b654-4301-a74d-2172028e58a5 None None] ******************************************************************************** {{(pid=69027) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 531.489143] env[69027]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 531.500392] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Getting list of instances from cluster (obj){ [ 531.500392] env[69027]: value = "domain-c8" [ 531.500392] env[69027]: _type = "ClusterComputeResource" [ 531.500392] env[69027]: } {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 531.501717] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8021f719-9301-4a02-a5fd-0f69efc3abc3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.511051] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Got total of 0 instances {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 531.511594] env[69027]: WARNING nova.virt.vmwareapi.driver [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 531.512055] env[69027]: INFO nova.virt.node [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Generated node identity 4923c91f-3b2b-4ad1-a821-36209acae639 [ 531.512276] env[69027]: INFO nova.virt.node [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Wrote node identity 4923c91f-3b2b-4ad1-a821-36209acae639 to /opt/stack/data/n-cpu-1/compute_id [ 531.524861] env[69027]: WARNING nova.compute.manager [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Compute nodes ['4923c91f-3b2b-4ad1-a821-36209acae639'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 531.560719] env[69027]: INFO nova.compute.manager [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 531.583847] env[69027]: WARNING nova.compute.manager [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 531.584428] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.584655] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.584814] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.584970] env[69027]: DEBUG nova.compute.resource_tracker [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 531.586045] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa7ea3c-53de-43ab-bdf8-918968daf5f1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.594437] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99afb14-8843-4da1-8a05-461331945a45 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.608125] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7ddd08-ab5c-4d20-8aa3-97e3c4586412 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.614244] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42273e9-96bc-43f6-ba7e-bbcd09687b5b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.644195] env[69027]: DEBUG nova.compute.resource_tracker [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180981MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 531.644356] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.644552] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.656320] env[69027]: WARNING 
nova.compute.resource_tracker [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] No compute node record for cpu-1:4923c91f-3b2b-4ad1-a821-36209acae639: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 4923c91f-3b2b-4ad1-a821-36209acae639 could not be found. [ 531.668922] env[69027]: INFO nova.compute.resource_tracker [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 4923c91f-3b2b-4ad1-a821-36209acae639 [ 531.722833] env[69027]: DEBUG nova.compute.resource_tracker [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 531.723035] env[69027]: DEBUG nova.compute.resource_tracker [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 531.824831] env[69027]: INFO nova.scheduler.client.report [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] [req-882e4f35-fd99-45c0-8980-b6dbdc79b367] Created resource provider record via placement API for resource provider with UUID 4923c91f-3b2b-4ad1-a821-36209acae639 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 531.843515] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543cb1cf-a964-4e4d-b5b4-86f67e7cccc5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.851659] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac87a16-ab32-4210-b658-ecce49830c73 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.881202] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e652ccec-cfbd-41a9-92b7-b8353c60e9de {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.888587] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680b6208-80d3-4ef7-a411-a9c5f34fc173 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.902078] env[69027]: DEBUG nova.compute.provider_tree [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 531.940574] env[69027]: DEBUG nova.scheduler.client.report [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Updated inventory for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 531.940817] env[69027]: DEBUG nova.compute.provider_tree [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Updating resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 generation from 0 to 1 during operation: update_inventory {{(pid=69027) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 531.940961] env[69027]: DEBUG nova.compute.provider_tree [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 531.988968] env[69027]: DEBUG nova.compute.provider_tree [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Updating resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 generation from 1 to 2 during operation: update_traits {{(pid=69027) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 532.006973] env[69027]: DEBUG nova.compute.resource_tracker [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 532.007211] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.363s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.007390] env[69027]: DEBUG nova.service [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Creating RPC server for service compute {{(pid=69027) start /opt/stack/nova/nova/service.py:182}} [ 532.019476] env[69027]: DEBUG nova.service [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] Join ServiceGroup membership for this service compute {{(pid=69027) start /opt/stack/nova/nova/service.py:199}} [ 532.019661] env[69027]: DEBUG nova.servicegroup.drivers.db [None req-6545ebf4-5e5b-42ad-ad3c-4d012918eebb None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69027) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 541.326449] env[69027]: DEBUG dbcounter [-] [69027] Writing DB stats nova_cell0:SELECT=1 {{(pid=69027) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 541.327115] env[69027]: DEBUG dbcounter [-] [69027] Writing DB stats nova_cell1:SELECT=1 {{(pid=69027) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 542.022511] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.032492] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Getting list of instances from cluster (obj){ [ 542.032492] env[69027]: value = "domain-c8" [ 542.032492] env[69027]: _type = "ClusterComputeResource" [ 542.032492] env[69027]: } {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 542.033667] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8ca3e1-2ab0-4518-bd77-e92151160074 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.042635] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Got total of 0 instances {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 542.042855] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 542.043178] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Getting list of instances from cluster (obj){ [ 542.043178] env[69027]: value = "domain-c8" [ 542.043178] env[69027]: _type = "ClusterComputeResource" [ 542.043178] env[69027]: } {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 542.044008] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10f46e7-0f5f-4f8c-a724-a4afe0cbfcb7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.051445] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Got total of 0 instances {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 570.584758] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquiring lock "f719053e-2753-49c6-b47d-5adb698cafac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.584758] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Lock "f719053e-2753-49c6-b47d-5adb698cafac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.614564] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 570.739057] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.739338] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.743525] env[69027]: INFO nova.compute.claims [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.913826] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3b8d2d-3820-4453-9094-4191655d195c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.924083] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90638811-d923-4f8b-95fd-1378321b764d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.965484] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56fd790-878d-4e43-b888-831e040df3a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.974312] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3753050c-7341-4790-b014-b5b1e5442afb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.994146] env[69027]: DEBUG nova.compute.provider_tree [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.010921] env[69027]: DEBUG nova.scheduler.client.report [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 571.039077] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 
tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.299s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.039077] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 571.093795] env[69027]: DEBUG nova.compute.utils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 571.096411] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Not allocating networking since 'none' was specified. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 571.111764] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 571.197902] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 571.873031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquiring lock "657170ea-02c4-4bc9-97d0-9aa3960fcaf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.873745] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "657170ea-02c4-4bc9-97d0-9aa3960fcaf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.906658] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 571.992024] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.992325] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.997019] env[69027]: INFO nova.compute.claims [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 572.138833] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af9e4c8-e76e-4eae-b6f3-e719e7fd24ee {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.149732] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43713686-bea7-4970-9dc1-d48791371752 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.187929] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e22c582-ff4f-4846-bfd1-078fb3d498d7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.201022] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837813cc-1f08-40aa-8e29-0ab598b98337 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.213972] env[69027]: DEBUG nova.compute.provider_tree [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.229126] env[69027]: DEBUG nova.scheduler.client.report [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 572.246688] env[69027]: DEBUG 
oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.254s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.249447] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 572.306313] env[69027]: DEBUG nova.compute.utils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 572.308936] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 572.309283] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 572.325087] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 572.416417] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 573.284226] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 573.284226] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 573.284645] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.284862] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 573.285039] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 573.285422] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 573.285495] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 573.285610] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
573.286041] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 573.286229] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 573.286801] env[69027]: DEBUG nova.virt.hardware [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 573.289654] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a844efa1-3abc-4565-818c-7f7eceee935a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.308169] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 573.308448] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 573.309129] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 573.309358] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 573.309506] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 573.309652] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 573.310105] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 573.310325] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 573.310503] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 573.310667] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 573.310898] env[69027]: DEBUG nova.virt.hardware [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 573.312293] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6a5753-2cbe-42ec-b96f-9fe8fb7ad866 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.317303] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b8a0bf-3d68-43e4-a6ff-740c4bd62fc0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.348279] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73239bae-c19c-46ee-8772-c8e39c0d539f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.372818] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635cb2cc-d9ce-4780-8b6d-f5c8912275f1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.402039] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Instance VIF info [] {{(pid=69027) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 573.416441] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 573.416897] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d84b0de-7309-40b3-9ca7-ed94db031533 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.432334] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Created folder: OpenStack in parent group-v4. [ 573.432516] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Creating folder: Project (f6c60fa30a5a4e72beac942ed15639d0). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 573.432786] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2fc100a-ee7d-434a-b92b-3a9a1960985e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.447234] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Created folder: Project (f6c60fa30a5a4e72beac942ed15639d0) in parent group-v677321. [ 573.447234] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Creating folder: Instances. Parent ref: group-v677322. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 573.447234] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1048c456-2fd9-4181-8e28-780d0fefeb34 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.458513] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Created folder: Instances in parent group-v677322. [ 573.459241] env[69027]: DEBUG oslo.service.loopingcall [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 573.459338] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 573.462076] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cd9395a-f0eb-4922-a320-bd729053c39c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.476815] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 573.476815] env[69027]: value = "task-3395052" [ 573.476815] env[69027]: _type = "Task" [ 573.476815] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.485561] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395052, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.644145] env[69027]: DEBUG nova.policy [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6ca598040408436f8eb2e2b0f07b229c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a05397ef54ec425f9747d2bd9f5e1f52', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 573.992022] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395052, 'name': CreateVM_Task, 'duration_secs': 0.341794} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.992022] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 573.992022] env[69027]: DEBUG oslo_vmware.service [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19806123-ff02-4662-b0c3-9fd3a9c74648 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.002136] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.002136] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.003088] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 574.003088] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c08c4e99-a1ce-4710-93f2-8cef9bcdcc55 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.009121] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Waiting for the task: (returnval){ [ 574.009121] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52caf0f9-10d4-2b22-f1bd-d82998a314b4" [ 574.009121] env[69027]: _type = "Task" [ 574.009121] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.021483] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52caf0f9-10d4-2b22-f1bd-d82998a314b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.527828] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 574.528196] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 574.528438] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 574.528675] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.532212] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 574.532212] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b0413db-727e-47ca-802c-759fc06492f2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.550221] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 574.550431] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 574.551698] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41801c99-652c-421f-9b07-1ee9dfad0431 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.562470] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e5fa0f3-e72f-44ca-8d86-fa9b01aaf9e1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.570030] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Waiting for the task: (returnval){ [ 574.570030] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5261fc2f-3cae-0340-d33c-c49ae39c9dbf" [ 574.570030] env[69027]: _type = "Task" [ 574.570030] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.584498] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 574.584920] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Creating directory with path [datastore2] vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 574.585349] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2be65d94-50a8-493d-b3ee-5746b3c79c55 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.617249] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Created directory with path [datastore2] vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 574.617249] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Fetch image to [datastore2] vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 574.617249] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 574.619944] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c27135-4b6f-4d18-9c53-4910dd8d957b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.635045] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7684cf1d-bd96-465a-9561-2193957c71dc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.651606] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a9240d-adeb-46b6-b7a7-22053f8150f2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.707866] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177a8180-8d79-4489-8395-9ac096f24fd2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.716323] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-25779b58-8620-4c97-9ab0-59013d4509bf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.806401] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 574.909844] env[69027]: DEBUG oslo_vmware.rw_handles [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 574.979843] env[69027]: DEBUG oslo_vmware.rw_handles [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 574.980113] env[69027]: DEBUG oslo_vmware.rw_handles [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 576.100392] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Successfully created port: 98ddcc26-3581-48d3-85ed-1157212d318f {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 577.085381] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquiring lock "a26389e3-7c20-4227-bd57-9d058964edc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.085707] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Lock "a26389e3-7c20-4227-bd57-9d058964edc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.100122] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 577.159855] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.160261] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.161686] env[69027]: INFO nova.compute.claims [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 577.313235] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b772ee-2618-4e9a-aaa3-8beda976a60c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.322581] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea65301-0250-4289-8f1e-8de0174f55ec {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.356365] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b31365b-56a2-46f4-b4f2-ef72a285e458 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.364411] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b78578-9fd3-4be9-a1ec-4139b6880dac {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.379491] env[69027]: DEBUG nova.compute.provider_tree [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.392237] env[69027]: DEBUG nova.scheduler.client.report [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 577.419385] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.258s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 577.419593] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 577.467867] env[69027]: DEBUG nova.compute.utils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 577.469500] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Allocating IP information in the background. 
{{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 577.470077] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 577.487632] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 577.579528] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 577.614023] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 577.614023] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 577.614023] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.614225] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 577.614225] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.614333] env[69027]: DEBUG nova.virt.hardware [None 
req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 577.614710] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 577.615061] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 577.615567] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 577.616375] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 577.616375] env[69027]: DEBUG nova.virt.hardware [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 577.617691] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a3eac3-b737-4dbe-874e-9cc00f721f48 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.630119] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8cef21-80b7-47c8-914b-fddc11231732 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.835531] env[69027]: DEBUG nova.policy [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8330579da5ba477eb46d3d6885d4fd9b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86e43b5af97244b29149a1fb7586fd6b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 578.334032] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquiring 
lock "2e16f12d-194a-47a5-824d-062a684a86f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.334032] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Lock "2e16f12d-194a-47a5-824d-062a684a86f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.343774] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 578.420500] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 578.420561] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.423351] env[69027]: INFO nova.compute.claims [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.591515] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b560fc83-1200-4f43-a3b0-41888f475ae4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.602591] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7a066a-d6bb-4bb4-ab7a-b42d8b6c338d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.635857] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd4e8b6-2cb6-415b-b488-d69d1d64a9bb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.643876] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47afdb55-0164-4568-8f3a-b1b51a4bc7d8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.661141] env[69027]: DEBUG nova.compute.provider_tree [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Inventory 
has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.673672] env[69027]: DEBUG nova.scheduler.client.report [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 578.697289] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.698448] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 578.754529] env[69027]: DEBUG nova.compute.utils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 578.756299] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 578.758502] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 578.783026] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 578.918242] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 578.969359] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 578.969359] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 578.969359] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 578.969600] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 578.969600] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 578.969600] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 578.969600] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 578.969600] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 578.969767] env[69027]: DEBUG nova.virt.hardware [None 
req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 578.969767] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 578.969767] env[69027]: DEBUG nova.virt.hardware [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 578.969767] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c878ee9-e0c8-4fa4-b35b-c28d50e2f397 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.982423] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e6068a-35f9-4ad2-a52f-9bc38620ae6b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.227313] env[69027]: DEBUG nova.policy [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '068cd3b9f2f644efad9300ca9026ce1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25b9a42836034191a530a284e7aac17e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 580.332423] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Successfully created port: 153dafc4-19d5-470c-a94f-4ac0bdf04fdc {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 581.083785] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Successfully updated port: 98ddcc26-3581-48d3-85ed-1157212d318f {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 581.104829] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquiring lock "refresh_cache-657170ea-02c4-4bc9-97d0-9aa3960fcaf9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.105562] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d 
tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquired lock "refresh_cache-657170ea-02c4-4bc9-97d0-9aa3960fcaf9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.105562] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 581.354199] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.087522] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Successfully created port: 4b364bfd-9e6a-4b38-b6d9-b8c1efff059b {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.972942] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Successfully updated port: 153dafc4-19d5-470c-a94f-4ac0bdf04fdc {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 583.002704] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquiring lock "refresh_cache-a26389e3-7c20-4227-bd57-9d058964edc5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.002704] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquired lock "refresh_cache-a26389e3-7c20-4227-bd57-9d058964edc5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.002704] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 583.424425] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.538367] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Updating instance_info_cache with network_info: [{"id": "98ddcc26-3581-48d3-85ed-1157212d318f", "address": "fa:16:3e:43:20:32", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98ddcc26-35", "ovs_interfaceid": "98ddcc26-3581-48d3-85ed-1157212d318f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.561661] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Releasing lock "refresh_cache-657170ea-02c4-4bc9-97d0-9aa3960fcaf9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.561661] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Instance network_info: |[{"id": "98ddcc26-3581-48d3-85ed-1157212d318f", "address": "fa:16:3e:43:20:32", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98ddcc26-35", "ovs_interfaceid": "98ddcc26-3581-48d3-85ed-1157212d318f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 583.561875] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:20:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98ddcc26-3581-48d3-85ed-1157212d318f', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 583.577046] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Creating folder: Project (a05397ef54ec425f9747d2bd9f5e1f52). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 583.579543] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-968100d0-8328-4968-8853-dfe76c6f9458 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.583602] env[69027]: DEBUG nova.compute.manager [req-4ea9f8f4-c3dd-4c4d-81ea-c677c2eac589 req-046933c1-6826-4ac6-853c-ad7b111c39ba service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Received event network-vif-plugged-98ddcc26-3581-48d3-85ed-1157212d318f {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 583.583914] env[69027]: DEBUG oslo_concurrency.lockutils [req-4ea9f8f4-c3dd-4c4d-81ea-c677c2eac589 req-046933c1-6826-4ac6-853c-ad7b111c39ba service nova] Acquiring lock "657170ea-02c4-4bc9-97d0-9aa3960fcaf9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.584196] env[69027]: DEBUG oslo_concurrency.lockutils [req-4ea9f8f4-c3dd-4c4d-81ea-c677c2eac589 req-046933c1-6826-4ac6-853c-ad7b111c39ba service nova] Lock "657170ea-02c4-4bc9-97d0-9aa3960fcaf9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.584435] env[69027]: DEBUG oslo_concurrency.lockutils [req-4ea9f8f4-c3dd-4c4d-81ea-c677c2eac589 req-046933c1-6826-4ac6-853c-ad7b111c39ba service nova] Lock "657170ea-02c4-4bc9-97d0-9aa3960fcaf9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.585142] env[69027]: DEBUG nova.compute.manager [req-4ea9f8f4-c3dd-4c4d-81ea-c677c2eac589 req-046933c1-6826-4ac6-853c-ad7b111c39ba service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] No waiting events found dispatching network-vif-plugged-98ddcc26-3581-48d3-85ed-1157212d318f {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 583.585142] env[69027]: WARNING nova.compute.manager [req-4ea9f8f4-c3dd-4c4d-81ea-c677c2eac589 req-046933c1-6826-4ac6-853c-ad7b111c39ba service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Received unexpected event network-vif-plugged-98ddcc26-3581-48d3-85ed-1157212d318f for instance with vm_state building and task_state spawning. 
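[Editor's note, not part of the captured log: the DEBUG records above cache a Neutron network_info blob per instance (see the update_instance_cache_with_nw_info entries for ports 98ddcc26-3581-48d3-85ed-1157212d318f and, further below, 153dafc4-19d5-470c-a94f-4ac0bdf04fdc). As a minimal, illustrative sketch only, the snippet that follows shows how such a blob can be inspected offline with the Python standard library; the NETWORK_INFO sample is a trimmed copy of values recorded in the log above, and the fixed_ips() helper is a name invented for this example, not a Nova or Neutron API.]

import json

# Trimmed copy of one cached network_info entry from the log above
# (port 98ddcc26-3581-48d3-85ed-1157212d318f on network "shared").
NETWORK_INFO = json.loads("""
[{"id": "98ddcc26-3581-48d3-85ed-1157212d318f",
  "address": "fa:16:3e:43:20:32",
  "type": "ovs",
  "devname": "tap98ddcc26-35",
  "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65",
              "bridge": "br-int",
              "label": "shared",
              "subnets": [{"cidr": "192.168.233.0/24",
                           "ips": [{"address": "192.168.233.40",
                                    "type": "fixed",
                                    "version": 4}]}]}}]
""")

def fixed_ips(network_info):
    """Yield (port_id, mac, fixed_ip) tuples from a network_info list."""
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip["type"] == "fixed":
                    yield vif["id"], vif["address"], ip["address"]

for port_id, mac, ip in fixed_ips(NETWORK_INFO):
    print(f"{port_id} {mac} {ip}")
# expected: 98ddcc26-3581-48d3-85ed-1157212d318f fa:16:3e:43:20:32 192.168.233.40

[End of editor's note; the captured log resumes below.]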
[ 583.603147] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Created folder: Project (a05397ef54ec425f9747d2bd9f5e1f52) in parent group-v677321. [ 583.603350] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Creating folder: Instances. Parent ref: group-v677325. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 583.603923] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-652e2093-48b0-42bd-b1aa-a2dd086bc5b0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.614527] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Created folder: Instances in parent group-v677325. [ 583.614769] env[69027]: DEBUG oslo.service.loopingcall [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 583.614965] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 583.615189] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7244d168-59fd-496a-81e7-31cb20cc8b98 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.642621] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 583.642621] env[69027]: value = "task-3395055" [ 583.642621] env[69027]: _type = "Task" [ 583.642621] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.652079] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395055, 'name': CreateVM_Task} progress is 5%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.157204] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395055, 'name': CreateVM_Task, 'duration_secs': 0.312987} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.157204] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 584.193560] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.193560] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.193850] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 584.194142] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14f4708b-e332-4764-b38d-9b57af6acfe0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.204896] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Waiting for the task: (returnval){ [ 584.204896] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]528bdbbe-c445-50e8-34a5-668eda71a46e" [ 584.204896] env[69027]: _type = "Task" [ 584.204896] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.216927] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]528bdbbe-c445-50e8-34a5-668eda71a46e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.451370] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Updating instance_info_cache with network_info: [{"id": "153dafc4-19d5-470c-a94f-4ac0bdf04fdc", "address": "fa:16:3e:a0:0d:db", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap153dafc4-19", "ovs_interfaceid": "153dafc4-19d5-470c-a94f-4ac0bdf04fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.469014] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Releasing lock "refresh_cache-a26389e3-7c20-4227-bd57-9d058964edc5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.469014] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Instance network_info: |[{"id": "153dafc4-19d5-470c-a94f-4ac0bdf04fdc", "address": "fa:16:3e:a0:0d:db", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap153dafc4-19", "ovs_interfaceid": "153dafc4-19d5-470c-a94f-4ac0bdf04fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 584.469237] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:0d:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '153dafc4-19d5-470c-a94f-4ac0bdf04fdc', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 584.475769] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Creating folder: Project (86e43b5af97244b29149a1fb7586fd6b). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 584.476707] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52d67f33-86af-4cb5-ac0e-4bd68afcd819 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.490781] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Created folder: Project (86e43b5af97244b29149a1fb7586fd6b) in parent group-v677321. [ 584.490781] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Creating folder: Instances. Parent ref: group-v677328. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 584.491258] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d7f0b28-8f0c-4b4f-8b61-a099647368b0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.504541] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Created folder: Instances in parent group-v677328. [ 584.504945] env[69027]: DEBUG oslo.service.loopingcall [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.505650] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 584.505650] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a6cece9-8bb4-4473-a9d2-afd558392852 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.525757] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 584.525757] env[69027]: value = "task-3395058" [ 584.525757] env[69027]: _type = "Task" [ 584.525757] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.535033] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395058, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.716638] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.716902] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 584.717463] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.876070] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "4ad409c8-465f-4106-946a-7f401358d5a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.876383] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "4ad409c8-465f-4106-946a-7f401358d5a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.894095] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 584.952310] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.952492] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.954819] env[69027]: INFO nova.compute.claims [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 585.037216] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395058, 'name': CreateVM_Task, 'duration_secs': 0.32741} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.037216] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 585.037216] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.037216] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.037391] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 585.038750] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e91c3387-730f-43db-a90a-cb207a7181ec {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.050108] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Waiting for the task: (returnval){ [ 585.050108] env[69027]: value = 
"session[52927036-1fe8-2440-5648-6648c565a79d]520683fc-09e3-9c5d-5968-a8d1fdf8b1d6" [ 585.050108] env[69027]: _type = "Task" [ 585.050108] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.058752] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]520683fc-09e3-9c5d-5968-a8d1fdf8b1d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.121078] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1420beb4-dff8-4ff7-965a-ed6bc8225fe7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.128369] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649aad64-cc07-4d51-8f7a-91994ad1bd62 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.166458] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac799c2-3ef4-416a-ad9c-1f8ec30533b1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.174107] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7651c0a3-1c9a-4e87-acc2-70a6994728bf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.187713] env[69027]: DEBUG nova.compute.provider_tree [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.200318] env[69027]: DEBUG nova.scheduler.client.report [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 585.220397] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.268s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.220843] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 
tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 585.273345] env[69027]: DEBUG nova.compute.utils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 585.274665] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 585.274830] env[69027]: DEBUG nova.network.neutron [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 585.303942] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 585.312281] env[69027]: DEBUG nova.compute.manager [req-f6b8e5be-7347-420f-8093-6ad7953d4427 req-660ae293-a68b-4b3a-81d6-a68d005e07a9 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Received event network-vif-plugged-153dafc4-19d5-470c-a94f-4ac0bdf04fdc {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 585.312281] env[69027]: DEBUG oslo_concurrency.lockutils [req-f6b8e5be-7347-420f-8093-6ad7953d4427 req-660ae293-a68b-4b3a-81d6-a68d005e07a9 service nova] Acquiring lock "a26389e3-7c20-4227-bd57-9d058964edc5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.312281] env[69027]: DEBUG oslo_concurrency.lockutils [req-f6b8e5be-7347-420f-8093-6ad7953d4427 req-660ae293-a68b-4b3a-81d6-a68d005e07a9 service nova] Lock "a26389e3-7c20-4227-bd57-9d058964edc5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.312281] env[69027]: DEBUG oslo_concurrency.lockutils [req-f6b8e5be-7347-420f-8093-6ad7953d4427 req-660ae293-a68b-4b3a-81d6-a68d005e07a9 service nova] Lock "a26389e3-7c20-4227-bd57-9d058964edc5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.312411] env[69027]: DEBUG nova.compute.manager [req-f6b8e5be-7347-420f-8093-6ad7953d4427 req-660ae293-a68b-4b3a-81d6-a68d005e07a9 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] No waiting events found dispatching 
network-vif-plugged-153dafc4-19d5-470c-a94f-4ac0bdf04fdc {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 585.312411] env[69027]: WARNING nova.compute.manager [req-f6b8e5be-7347-420f-8093-6ad7953d4427 req-660ae293-a68b-4b3a-81d6-a68d005e07a9 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Received unexpected event network-vif-plugged-153dafc4-19d5-470c-a94f-4ac0bdf04fdc for instance with vm_state building and task_state spawning. [ 585.397037] env[69027]: DEBUG nova.policy [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40fbd1dcad8b47209b2f97887c1d4d8e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '795a02370bc043c4a3c51663ce48a550', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 585.407087] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 585.454733] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 585.454992] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 585.455161] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.455345] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 
tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 585.455487] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.455630] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 585.455831] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 585.455987] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 585.456167] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 585.456325] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 585.456489] env[69027]: DEBUG nova.virt.hardware [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 585.457812] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3c6984-3de6-414f-a406-3b8db3378793 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.470322] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a3d9f4-c0c5-4713-8e7d-3377e5ac8e16 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.565712] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Releasing lock "[datastore2] 
devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.566012] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 585.566253] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.555430] env[69027]: DEBUG nova.network.neutron [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Successfully created port: ef3495b9-d03f-4d09-b3cd-a4d666267c1a {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.681939] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Successfully updated port: 4b364bfd-9e6a-4b38-b6d9-b8c1efff059b {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 586.700618] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquiring lock "refresh_cache-2e16f12d-194a-47a5-824d-062a684a86f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.700618] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquired lock "refresh_cache-2e16f12d-194a-47a5-824d-062a684a86f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.700798] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 586.858585] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.486257] env[69027]: DEBUG nova.compute.manager [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Received event network-changed-98ddcc26-3581-48d3-85ed-1157212d318f {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 587.486257] env[69027]: DEBUG nova.compute.manager [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Refreshing instance network info cache due to event network-changed-98ddcc26-3581-48d3-85ed-1157212d318f. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 587.486257] env[69027]: DEBUG oslo_concurrency.lockutils [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] Acquiring lock "refresh_cache-657170ea-02c4-4bc9-97d0-9aa3960fcaf9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 587.486257] env[69027]: DEBUG oslo_concurrency.lockutils [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] Acquired lock "refresh_cache-657170ea-02c4-4bc9-97d0-9aa3960fcaf9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 587.486257] env[69027]: DEBUG nova.network.neutron [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Refreshing network info cache for port 98ddcc26-3581-48d3-85ed-1157212d318f {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 587.781409] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.781409] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.781409] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 587.781409] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 587.811157] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 587.811157] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 587.811157] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 587.811157] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 587.811157] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 587.811558] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 587.811558] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.812325] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.812512] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.813280] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.813799] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.814034] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.814233] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 587.814409] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 587.831684] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.831882] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.832078] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.832211] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 587.833359] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1345c6-e452-4350-aaf3-618a973b1e34 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.846802] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1090df88-36cb-4049-a995-1767c31a699a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.863505] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5519df76-7bc0-4883-9c13-30225a7bebd7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.870629] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306d5b80-9633-442a-ba1a-6cd702fb31f1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.901275] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180988MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 587.901430] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.901752] 
env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 587.911887] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Updating instance_info_cache with network_info: [{"id": "4b364bfd-9e6a-4b38-b6d9-b8c1efff059b", "address": "fa:16:3e:2f:ed:b9", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b364bfd-9e", "ovs_interfaceid": "4b364bfd-9e6a-4b38-b6d9-b8c1efff059b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.927743] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Releasing lock "refresh_cache-2e16f12d-194a-47a5-824d-062a684a86f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 587.928452] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Instance network_info: |[{"id": "4b364bfd-9e6a-4b38-b6d9-b8c1efff059b", "address": "fa:16:3e:2f:ed:b9", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b364bfd-9e", "ovs_interfaceid": "4b364bfd-9e6a-4b38-b6d9-b8c1efff059b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 587.929379] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:ed:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b364bfd-9e6a-4b38-b6d9-b8c1efff059b', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.938728] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Creating folder: Project (25b9a42836034191a530a284e7aac17e). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.939855] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e7a6982-981c-41b6-b276-a2fa5c7f3529 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.957564] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Created folder: Project (25b9a42836034191a530a284e7aac17e) in parent group-v677321. [ 587.958193] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Creating folder: Instances. Parent ref: group-v677331. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 587.958193] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58fd2e29-be2b-4081-b2ae-ad4d1282e25a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.967882] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Created folder: Instances in parent group-v677331. [ 587.967882] env[69027]: DEBUG oslo.service.loopingcall [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 587.967882] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 587.967882] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-954e4db0-97d4-41eb-8b35-a2893733d219 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.992232] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.992232] env[69027]: value = "task-3395061" [ 587.992232] env[69027]: _type = "Task" [ 587.992232] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.000732] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395061, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.012423] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f719053e-2753-49c6-b47d-5adb698cafac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.012587] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 657170ea-02c4-4bc9-97d0-9aa3960fcaf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.012714] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a26389e3-7c20-4227-bd57-9d058964edc5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.012831] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 2e16f12d-194a-47a5-824d-062a684a86f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.012946] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ad409c8-465f-4106-946a-7f401358d5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.013204] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 588.013380] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 588.103144] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b25b0b-294e-429d-b91f-8e39a34d1bbf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.111070] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cb0ef9-3565-43dc-be1a-b5cf9af090bc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.143077] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffe3503-322b-4f59-80b7-9cb9d6e7f58a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.150695] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553efb5b-ce70-489c-beab-e5aff781f452 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.165483] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.174971] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 588.194937] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 588.195225] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.293s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 588.268018] env[69027]: DEBUG nova.network.neutron [None 
req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Successfully updated port: ef3495b9-d03f-4d09-b3cd-a4d666267c1a {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 588.284285] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "refresh_cache-4ad409c8-465f-4106-946a-7f401358d5a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.284849] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquired lock "refresh_cache-4ad409c8-465f-4106-946a-7f401358d5a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.285254] env[69027]: DEBUG nova.network.neutron [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 588.381353] env[69027]: DEBUG nova.network.neutron [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.512480] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395061, 'name': CreateVM_Task, 'duration_secs': 0.31527} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.512480] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 588.512663] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.513010] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.513373] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 588.513650] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ab90de-1013-42e8-ba85-dbc08eb2142d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.519060] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Waiting for the task: (returnval){ [ 588.519060] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52cc4b76-e675-ff8c-3a18-389df7e85b79" [ 588.519060] env[69027]: _type = "Task" [ 588.519060] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.531484] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52cc4b76-e675-ff8c-3a18-389df7e85b79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.744315] env[69027]: DEBUG nova.network.neutron [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Updated VIF entry in instance network info cache for port 98ddcc26-3581-48d3-85ed-1157212d318f. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 588.744705] env[69027]: DEBUG nova.network.neutron [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Updating instance_info_cache with network_info: [{"id": "98ddcc26-3581-48d3-85ed-1157212d318f", "address": "fa:16:3e:43:20:32", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.40", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98ddcc26-35", "ovs_interfaceid": "98ddcc26-3581-48d3-85ed-1157212d318f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.761512] env[69027]: DEBUG oslo_concurrency.lockutils [req-ce6694eb-3e67-4850-90f7-5bb664c88f27 req-20becdc9-2b36-4c35-971d-598d4c78778b service nova] Releasing lock "refresh_cache-657170ea-02c4-4bc9-97d0-9aa3960fcaf9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.870971] env[69027]: DEBUG nova.network.neutron [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Updating instance_info_cache with network_info: [{"id": "ef3495b9-d03f-4d09-b3cd-a4d666267c1a", "address": "fa:16:3e:08:7f:1a", "network": {"id": "69b7884b-5cab-46d9-8b36-f3724a130d68", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1324551187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "795a02370bc043c4a3c51663ce48a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef3495b9-d0", "ovs_interfaceid": "ef3495b9-d03f-4d09-b3cd-a4d666267c1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.887176] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Releasing lock "refresh_cache-4ad409c8-465f-4106-946a-7f401358d5a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.887176] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Instance network_info: |[{"id": "ef3495b9-d03f-4d09-b3cd-a4d666267c1a", "address": "fa:16:3e:08:7f:1a", "network": {"id": "69b7884b-5cab-46d9-8b36-f3724a130d68", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1324551187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "795a02370bc043c4a3c51663ce48a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef3495b9-d0", "ovs_interfaceid": "ef3495b9-d03f-4d09-b3cd-a4d666267c1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 588.887338] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:7f:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca50cd14-9e1f-4d74-a066-e5a45ba0ce22', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef3495b9-d03f-4d09-b3cd-a4d666267c1a', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 588.900861] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Creating folder: Project (795a02370bc043c4a3c51663ce48a550). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 588.900861] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-19618cfa-a8d6-4e23-b433-63240cc26e26 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.912861] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Created folder: Project (795a02370bc043c4a3c51663ce48a550) in parent group-v677321. 
[ 588.912861] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Creating folder: Instances. Parent ref: group-v677334. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 588.912861] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf5c9582-3333-4d45-9b90-20f13ccb93a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.925181] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Created folder: Instances in parent group-v677334. [ 588.925441] env[69027]: DEBUG oslo.service.loopingcall [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 588.925937] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 588.926279] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-698ce6c5-f791-4a4d-b38a-f728d21db34a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.955316] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 588.955316] env[69027]: value = "task-3395064" [ 588.955316] env[69027]: _type = "Task" [ 588.955316] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.966515] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395064, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.035994] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.035994] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.036344] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.334447] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.334726] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.349800] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 589.393791] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "9ff1dac6-b328-42c3-babe-86aef27466c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.396075] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "9ff1dac6-b328-42c3-babe-86aef27466c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.407746] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 589.417414] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.417935] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.423011] env[69027]: INFO nova.compute.claims [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.471825] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395064, 'name': CreateVM_Task, 'duration_secs': 0.388586} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.471877] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 589.474570] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 589.474570] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.477923] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 589.477923] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d280d88e-4120-4e93-929f-28f4652c5f0f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.484572] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Waiting for the task: (returnval){ [ 589.484572] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5249e0b2-29db-9f7d-828b-24c42a932ada" [ 589.484572] env[69027]: _type = "Task" [ 589.484572] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.496634] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5249e0b2-29db-9f7d-828b-24c42a932ada, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.497862] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.668369] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a017ba7-6cbe-4820-80ae-8bdd6d280ea4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.679165] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5251df0f-51ea-4359-b6ed-47b213a16ef7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.712751] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecaa0d6-1199-4499-a47d-196ddf56c90c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.721243] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3010546f-8000-499e-b837-eb224aaa3698 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.734853] env[69027]: DEBUG nova.compute.provider_tree [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.752209] env[69027]: DEBUG nova.scheduler.client.report [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 589.774141] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.354s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.774141] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 589.775667] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.278s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.777522] env[69027]: INFO nova.compute.claims [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.833358] env[69027]: DEBUG nova.compute.utils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.836315] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 589.837146] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 589.849018] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 589.999290] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 590.001133] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.004794] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.011920] env[69027]: DEBUG nova.policy [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8891d08d86c472090183a0a1b95c204', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8059f86ca2e4b5dbd7a6b46b8e672fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 590.020335] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 590.057870] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.058204] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.059069] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.060045] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.060045] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.060154] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.060462] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.060628] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 590.060801] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.064015] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.064015] env[69027]: DEBUG nova.virt.hardware [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.064662] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317b74d9-b755-4198-a4c7-3c861874c82c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.076266] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdcdd9f-ae1a-4675-b9be-76c7f0b65531 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.118776] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6413b43-9e84-4ca1-a1d9-583179488598 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.135677] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5363f3d0-d6ee-4755-82fa-bc6d5fb46434 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.190728] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fdd6e9-9c4a-484b-b68a-296f0355f3d8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.198590] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03135fd-547d-4000-95af-6f30c22ce289 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.217491] env[69027]: DEBUG nova.compute.provider_tree [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.232923] env[69027]: DEBUG nova.scheduler.client.report [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 590.256015] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.480s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.256806] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 590.294606] env[69027]: DEBUG nova.compute.manager [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Received event network-changed-153dafc4-19d5-470c-a94f-4ac0bdf04fdc {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 590.294606] env[69027]: DEBUG nova.compute.manager [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Refreshing instance network info cache due to event network-changed-153dafc4-19d5-470c-a94f-4ac0bdf04fdc. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 590.294606] env[69027]: DEBUG oslo_concurrency.lockutils [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] Acquiring lock "refresh_cache-a26389e3-7c20-4227-bd57-9d058964edc5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.294606] env[69027]: DEBUG oslo_concurrency.lockutils [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] Acquired lock "refresh_cache-a26389e3-7c20-4227-bd57-9d058964edc5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.294827] env[69027]: DEBUG nova.network.neutron [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Refreshing network info cache for port 153dafc4-19d5-470c-a94f-4ac0bdf04fdc {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 590.310910] env[69027]: DEBUG nova.compute.utils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.312155] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Allocating IP information in the background. 
{{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 590.312349] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.323162] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 590.360524] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "aec054f1-0d52-49be-9dee-8db0ae362f12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.360524] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.376991] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 590.420474] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 590.455275] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 590.455513] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 590.456201] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 590.456201] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 590.456546] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 590.456756] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 590.456987] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 590.457181] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 590.457350] 
env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 590.457510] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 590.457680] env[69027]: DEBUG nova.virt.hardware [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 590.458593] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b38815-b4cb-4f14-b9f1-30c8de2debca {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.462486] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.462714] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 590.465295] env[69027]: INFO nova.compute.claims [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.476565] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50ff8ca-2181-4ec3-941a-a538830892c8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.725889] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ed4778-7c67-4aae-ac0e-634d6c4a1503 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.736574] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ade0ba-100d-42c1-9442-61307ae52177 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.772406] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13967937-f1dc-406a-a8db-9b46efbe097b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.784522] 
env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42dd9650-fc95-4139-aba5-b93f47c66c4f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.802701] env[69027]: DEBUG nova.compute.provider_tree [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.825283] env[69027]: DEBUG nova.scheduler.client.report [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 590.845663] env[69027]: DEBUG nova.policy [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd31e55f937a84ec1a8868ccd1cfc6c6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9dec99d89ef4ee3a43b5242fced36a8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 590.850075] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.387s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.851817] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 590.917658] env[69027]: DEBUG nova.compute.utils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 590.919693] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Allocating IP information in the background. 
{{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 590.920191] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.938454] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 591.047304] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 591.066482] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.066482] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.087827] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.088133] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.088308] env[69027]: DEBUG 
nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.088490] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.088633] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.088778] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 591.088987] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.089209] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.089423] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.089610] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.090306] env[69027]: DEBUG nova.virt.hardware [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.094118] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d4a588-3b31-492c-aa24-abfe9ed5e45a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.099762] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 591.109483] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b47536a-03d4-4c60-82a4-15bd496573e5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.115581] env[69027]: DEBUG nova.policy [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff6e4c637d414a0fa28ac2d3d3070f5d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0edf1fbe4d4a438db0a192b02dbbdddd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 591.197213] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 591.197213] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.197838] env[69027]: INFO nova.compute.claims [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 591.368273] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Successfully created port: 1d8b922a-b400-466a-a5c8-05b027a503e5 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.462279] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7451b3cd-2125-4c66-a1fa-62e06527aa7b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.472367] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af7b66d-d3b8-4964-bec5-9fec9e62f294 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.510833] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb850370-75cb-4c7d-94f2-d7511f2921a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.522662] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f760936-1d58-4caa-9560-dd978b6a23a1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.538429] env[69027]: DEBUG nova.compute.provider_tree [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.549985] env[69027]: DEBUG nova.scheduler.client.report [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 591.570873] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.374s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.571564] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 591.633071] env[69027]: DEBUG nova.compute.utils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 591.634717] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 591.635354] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 591.648444] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 591.730151] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 591.757370] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.757624] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.757783] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.757965] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.758146] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.758328] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 591.758526] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.758678] env[69027]: DEBUG nova.virt.hardware 
[None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.758854] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.759009] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.759203] env[69027]: DEBUG nova.virt.hardware [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.760582] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63f02f0-77ab-4816-8bf0-0c669977dd24 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.769962] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70cc16f8-5543-437b-8f46-81cfd119507e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.793296] env[69027]: DEBUG nova.network.neutron [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Updated VIF entry in instance network info cache for port 153dafc4-19d5-470c-a94f-4ac0bdf04fdc. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 591.793678] env[69027]: DEBUG nova.network.neutron [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Updating instance_info_cache with network_info: [{"id": "153dafc4-19d5-470c-a94f-4ac0bdf04fdc", "address": "fa:16:3e:a0:0d:db", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap153dafc4-19", "ovs_interfaceid": "153dafc4-19d5-470c-a94f-4ac0bdf04fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.807380] env[69027]: DEBUG oslo_concurrency.lockutils [req-2592c81d-9986-4c70-9423-5161456b7829 req-d590d764-4d90-43ab-ae47-dc30382ff9b5 service nova] Releasing lock "refresh_cache-a26389e3-7c20-4227-bd57-9d058964edc5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.975338] env[69027]: DEBUG nova.policy [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec4410b37a464672bb924564b24a02c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08b65d39d1924efc9d2dbd4ed09e43e7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 592.588519] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Successfully created port: 06b4aa22-a172-428a-8648-3a0c62009398 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 592.915579] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Successfully created port: 1c2fb916-4323-4348-8815-a12d0b82716b {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.316874] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be 
tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "7c4204b8-2858-43a5-855d-c99b00e91d0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.317172] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.329581] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 593.391715] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.392101] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.393516] env[69027]: INFO nova.compute.claims [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.678749] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2819d8b2-07b8-4bd2-b113-3b99b964afef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.682552] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Successfully created port: 7eb9f713-efec-4fc6-a124-93669bf3f39b {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.692022] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0829465-4014-4e45-9f7b-955445faeb15 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.730176] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92cf826-51ac-44a9-a7d5-873080b84bdf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.735876] 
env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0dd640-ea91-46c3-b86d-28b2905e2736 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.751591] env[69027]: DEBUG nova.compute.provider_tree [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.766306] env[69027]: DEBUG nova.scheduler.client.report [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 593.793491] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.795000] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 593.854422] env[69027]: DEBUG nova.compute.utils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 593.855791] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 593.855970] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 593.883616] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 593.989270] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 594.021415] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 594.022018] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 594.024561] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.024933] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 594.025495] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 594.025634] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 594.025925] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 594.026231] env[69027]: DEBUG 
nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 594.026454] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 594.026652] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 594.026934] env[69027]: DEBUG nova.virt.hardware [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 594.028308] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33285dfe-e646-4982-b9a5-3716474a0309 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.037671] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb76fef0-a490-46ea-9069-f0cdbbc06d77 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.060572] env[69027]: DEBUG nova.policy [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6ed005a4359c41209b43f043c98ad0ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a319a0627bf40138abe3c293c111dbb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 594.491116] env[69027]: DEBUG nova.compute.manager [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Received event network-vif-plugged-4b364bfd-9e6a-4b38-b6d9-b8c1efff059b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 594.491343] env[69027]: DEBUG oslo_concurrency.lockutils [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] Acquiring lock "2e16f12d-194a-47a5-824d-062a684a86f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.491548] env[69027]: DEBUG oslo_concurrency.lockutils [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] Lock "2e16f12d-194a-47a5-824d-062a684a86f8-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.491715] env[69027]: DEBUG oslo_concurrency.lockutils [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] Lock "2e16f12d-194a-47a5-824d-062a684a86f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 594.491918] env[69027]: DEBUG nova.compute.manager [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] No waiting events found dispatching network-vif-plugged-4b364bfd-9e6a-4b38-b6d9-b8c1efff059b {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 594.492924] env[69027]: WARNING nova.compute.manager [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Received unexpected event network-vif-plugged-4b364bfd-9e6a-4b38-b6d9-b8c1efff059b for instance with vm_state building and task_state spawning. [ 594.493184] env[69027]: DEBUG nova.compute.manager [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Received event network-changed-4b364bfd-9e6a-4b38-b6d9-b8c1efff059b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 594.493348] env[69027]: DEBUG nova.compute.manager [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Refreshing instance network info cache due to event network-changed-4b364bfd-9e6a-4b38-b6d9-b8c1efff059b. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 594.493586] env[69027]: DEBUG oslo_concurrency.lockutils [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] Acquiring lock "refresh_cache-2e16f12d-194a-47a5-824d-062a684a86f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.493684] env[69027]: DEBUG oslo_concurrency.lockutils [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] Acquired lock "refresh_cache-2e16f12d-194a-47a5-824d-062a684a86f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.493843] env[69027]: DEBUG nova.network.neutron [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Refreshing network info cache for port 4b364bfd-9e6a-4b38-b6d9-b8c1efff059b {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 595.094398] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "339bab90-238a-47ab-89f5-1ff9541ec14d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.094833] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.409816] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Successfully updated port: 1d8b922a-b400-466a-a5c8-05b027a503e5 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 595.421465] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "refresh_cache-ae5e2ca1-75e2-4023-b297-4cc265f038e5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.421610] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquired lock "refresh_cache-ae5e2ca1-75e2-4023-b297-4cc265f038e5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.421759] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Building network info cache for instance {{(pid=69027) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 595.667345] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.867550] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Successfully created port: b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.909462] env[69027]: DEBUG nova.network.neutron [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Updated VIF entry in instance network info cache for port 4b364bfd-9e6a-4b38-b6d9-b8c1efff059b. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 595.909835] env[69027]: DEBUG nova.network.neutron [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Updating instance_info_cache with network_info: [{"id": "4b364bfd-9e6a-4b38-b6d9-b8c1efff059b", "address": "fa:16:3e:2f:ed:b9", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.115", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b364bfd-9e", "ovs_interfaceid": "4b364bfd-9e6a-4b38-b6d9-b8c1efff059b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.925329] env[69027]: DEBUG oslo_concurrency.lockutils [req-ec5e2184-45cc-493c-9ab2-559d771d383a req-1b930157-7706-43c7-b1f1-0ba8af8f3028 service nova] Releasing lock "refresh_cache-2e16f12d-194a-47a5-824d-062a684a86f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 596.215024] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Successfully updated port: 06b4aa22-a172-428a-8648-3a0c62009398 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.235053] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 
tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "refresh_cache-9ff1dac6-b328-42c3-babe-86aef27466c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.235250] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "refresh_cache-9ff1dac6-b328-42c3-babe-86aef27466c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.235413] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 596.546174] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 596.668141] env[69027]: DEBUG nova.compute.manager [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Received event network-vif-plugged-ef3495b9-d03f-4d09-b3cd-a4d666267c1a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 596.668501] env[69027]: DEBUG oslo_concurrency.lockutils [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] Acquiring lock "4ad409c8-465f-4106-946a-7f401358d5a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.669519] env[69027]: DEBUG oslo_concurrency.lockutils [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] Lock "4ad409c8-465f-4106-946a-7f401358d5a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 596.670356] env[69027]: DEBUG oslo_concurrency.lockutils [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] Lock "4ad409c8-465f-4106-946a-7f401358d5a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.670749] env[69027]: DEBUG nova.compute.manager [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] No waiting events found dispatching network-vif-plugged-ef3495b9-d03f-4d09-b3cd-a4d666267c1a {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 596.671245] env[69027]: WARNING nova.compute.manager [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 
4ad409c8-465f-4106-946a-7f401358d5a3] Received unexpected event network-vif-plugged-ef3495b9-d03f-4d09-b3cd-a4d666267c1a for instance with vm_state building and task_state spawning. [ 596.671720] env[69027]: DEBUG nova.compute.manager [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Received event network-changed-ef3495b9-d03f-4d09-b3cd-a4d666267c1a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 596.677178] env[69027]: DEBUG nova.compute.manager [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Refreshing instance network info cache due to event network-changed-ef3495b9-d03f-4d09-b3cd-a4d666267c1a. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 596.677178] env[69027]: DEBUG oslo_concurrency.lockutils [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] Acquiring lock "refresh_cache-4ad409c8-465f-4106-946a-7f401358d5a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 596.677178] env[69027]: DEBUG oslo_concurrency.lockutils [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] Acquired lock "refresh_cache-4ad409c8-465f-4106-946a-7f401358d5a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.677178] env[69027]: DEBUG nova.network.neutron [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Refreshing network info cache for port ef3495b9-d03f-4d09-b3cd-a4d666267c1a {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 597.025345] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Updating instance_info_cache with network_info: [{"id": "1d8b922a-b400-466a-a5c8-05b027a503e5", "address": "fa:16:3e:43:8e:72", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d8b922a-b4", "ovs_interfaceid": "1d8b922a-b400-466a-a5c8-05b027a503e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.047045] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Releasing lock "refresh_cache-ae5e2ca1-75e2-4023-b297-4cc265f038e5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.047045] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Instance network_info: |[{"id": "1d8b922a-b400-466a-a5c8-05b027a503e5", "address": "fa:16:3e:43:8e:72", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d8b922a-b4", "ovs_interfaceid": "1d8b922a-b400-466a-a5c8-05b027a503e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 597.047262] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:8e:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d8b922a-b400-466a-a5c8-05b027a503e5', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 597.053645] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Creating folder: Project (b8059f86ca2e4b5dbd7a6b46b8e672fe). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 597.054528] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d146594-b8e3-4d12-b603-176cf9def6ef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.066074] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Created folder: Project (b8059f86ca2e4b5dbd7a6b46b8e672fe) in parent group-v677321. 
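The "Instance VIF info" entries above are derived from the cached network_info shown a few lines earlier. As a minimal sketch (plain Python, not Nova source), the fields can be reconstructed from a single port entry; the 11-character truncation used for devname matches the tap names observed in this log (tap1d8b922a-b4, tap153dafc4-19, tap06b4aa22-a1) and is treated here as an observed convention rather than a documented rule.

    port = {  # one entry from the instance_info_cache above, trimmed to the fields used here
        "id": "1d8b922a-b400-466a-a5c8-05b027a503e5",
        "address": "fa:16:3e:43:8e:72",
        "details": {"nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0"},
        "network": {"bridge": "br-int"},
    }

    devname = "tap" + port["id"][:11]  # -> "tap1d8b922a-b4", as logged

    vif_info = {  # same shape as the "Instance VIF info" entry for this instance
        "network_name": port["network"]["bridge"],
        "mac_address": port["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": port["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],
        "vif_model": "vmxnet3",
    }

    print(devname, vif_info["network_ref"]["network-id"])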
[ 597.066280] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Creating folder: Instances. Parent ref: group-v677337. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 597.066518] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bfbde4c-0849-4b0b-90fc-33b68bb2d6e5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.076081] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Created folder: Instances in parent group-v677337. [ 597.076320] env[69027]: DEBUG oslo.service.loopingcall [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 597.076504] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 597.076701] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-439d79ac-8f2c-444d-b532-2ceff14cae51 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.097463] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 597.097463] env[69027]: value = "task-3395067" [ 597.097463] env[69027]: _type = "Task" [ 597.097463] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.106760] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395067, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.185904] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Successfully updated port: 7eb9f713-efec-4fc6-a124-93669bf3f39b {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 597.204250] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "refresh_cache-6ad953b9-4ded-42cd-86e0-2b1b707be4e4" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.204804] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired lock "refresh_cache-6ad953b9-4ded-42cd-86e0-2b1b707be4e4" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.205019] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 597.608674] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Successfully updated port: 1c2fb916-4323-4348-8815-a12d0b82716b {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 597.617166] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395067, 'name': CreateVM_Task, 'duration_secs': 0.309092} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.617521] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.619561] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 597.620549] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.620752] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.621247] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 597.622254] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c23ab08d-5d7e-4377-b3a8-3c3dcb40a3a2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.625298] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "refresh_cache-aec054f1-0d52-49be-9dee-8db0ae362f12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.625346] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquired lock "refresh_cache-aec054f1-0d52-49be-9dee-8db0ae362f12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.625530] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 597.630727] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Waiting for the task: (returnval){ [ 597.630727] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52976cb3-27e2-08a4-3c21-1f6b7ace2fff" [ 597.630727] env[69027]: _type = "Task" [ 597.630727] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.639864] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52976cb3-27e2-08a4-3c21-1f6b7ace2fff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.739280] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.774844] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Updating instance_info_cache with network_info: [{"id": "06b4aa22-a172-428a-8648-3a0c62009398", "address": "fa:16:3e:a9:16:e6", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06b4aa22-a1", "ovs_interfaceid": "06b4aa22-a172-428a-8648-3a0c62009398", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.798460] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "refresh_cache-9ff1dac6-b328-42c3-babe-86aef27466c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.799934] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Instance network_info: |[{"id": "06b4aa22-a172-428a-8648-3a0c62009398", "address": "fa:16:3e:a9:16:e6", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06b4aa22-a1", "ovs_interfaceid": "06b4aa22-a172-428a-8648-3a0c62009398", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 597.800143] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:16:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '06b4aa22-a172-428a-8648-3a0c62009398', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 597.810168] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating folder: Project (f9dec99d89ef4ee3a43b5242fced36a8). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 597.810991] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c760efff-8bc5-4eab-a516-9019571c0d92 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.823800] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created folder: Project (f9dec99d89ef4ee3a43b5242fced36a8) in parent group-v677321. [ 597.824017] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating folder: Instances. Parent ref: group-v677340. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 597.824540] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cdf58684-753c-436e-94e7-601d6d8d4a6c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.836598] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created folder: Instances in parent group-v677340. 
[ 597.836598] env[69027]: DEBUG oslo.service.loopingcall [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 597.836990] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 597.837603] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7a9a0c1-b6ce-4ba1-9151-10ad1df9f27e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.861472] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 597.861472] env[69027]: value = "task-3395070" [ 597.861472] env[69027]: _type = "Task" [ 597.861472] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.869551] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395070, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.149890] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.150228] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 598.150367] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.179220] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "b6a38a84-0b95-494c-a423-3360824ed8d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.179456] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.229802] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Updating instance_info_cache with network_info: [{"id": "1c2fb916-4323-4348-8815-a12d0b82716b", "address": "fa:16:3e:c4:af:83", "network": {"id": "e4e3b623-8795-444a-90e1-6f9b5834cccb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2073664562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0edf1fbe4d4a438db0a192b02dbbdddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c2fb916-43", "ovs_interfaceid": "1c2fb916-4323-4348-8815-a12d0b82716b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.247765] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Releasing lock "refresh_cache-aec054f1-0d52-49be-9dee-8db0ae362f12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.248979] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Instance network_info: |[{"id": "1c2fb916-4323-4348-8815-a12d0b82716b", "address": "fa:16:3e:c4:af:83", "network": {"id": "e4e3b623-8795-444a-90e1-6f9b5834cccb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2073664562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0edf1fbe4d4a438db0a192b02dbbdddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c2fb916-43", "ovs_interfaceid": "1c2fb916-4323-4348-8815-a12d0b82716b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 598.249121] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:af:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73f6629b-7f80-4a5b-8f15-c7a1635b3c33', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c2fb916-4323-4348-8815-a12d0b82716b', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.257181] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Creating folder: Project (0edf1fbe4d4a438db0a192b02dbbdddd). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.257878] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3885299c-3340-447a-9a64-db849e91fdae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.270244] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Created folder: Project (0edf1fbe4d4a438db0a192b02dbbdddd) in parent group-v677321. [ 598.270244] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Creating folder: Instances. Parent ref: group-v677343. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 598.270244] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-984ed732-9b43-4670-a261-a3823ff13c05 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.281448] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Created folder: Instances in parent group-v677343. [ 598.281677] env[69027]: DEBUG oslo.service.loopingcall [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 598.281862] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 598.282073] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6af0fa4-1dd6-4cc0-b7b7-4e11efaa2ba9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.303833] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 598.303833] env[69027]: value = "task-3395073" [ 598.303833] env[69027]: _type = "Task" [ 598.303833] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.313377] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395073, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.373131] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395070, 'name': CreateVM_Task, 'duration_secs': 0.282204} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.373359] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 598.374360] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.374360] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.377317] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 598.377317] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4952e51b-8f1e-458f-8407-b6c368c99cfd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.380027] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 598.380027] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52a16938-44bb-d405-43b7-749d0f1c3c80" [ 598.380027] env[69027]: _type = "Task" [ 598.380027] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.388960] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52a16938-44bb-d405-43b7-749d0f1c3c80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.538327] env[69027]: DEBUG nova.compute.manager [req-9fbf3cd6-bcd4-42dc-94fc-c887b7663936 req-cb00cb51-21b7-493f-8f03-8faa2a53415f service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Received event network-vif-plugged-06b4aa22-a172-428a-8648-3a0c62009398 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 598.538703] env[69027]: DEBUG oslo_concurrency.lockutils [req-9fbf3cd6-bcd4-42dc-94fc-c887b7663936 req-cb00cb51-21b7-493f-8f03-8faa2a53415f service nova] Acquiring lock "9ff1dac6-b328-42c3-babe-86aef27466c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.538751] env[69027]: DEBUG oslo_concurrency.lockutils [req-9fbf3cd6-bcd4-42dc-94fc-c887b7663936 req-cb00cb51-21b7-493f-8f03-8faa2a53415f service nova] Lock "9ff1dac6-b328-42c3-babe-86aef27466c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.539043] env[69027]: DEBUG oslo_concurrency.lockutils [req-9fbf3cd6-bcd4-42dc-94fc-c887b7663936 req-cb00cb51-21b7-493f-8f03-8faa2a53415f service nova] Lock "9ff1dac6-b328-42c3-babe-86aef27466c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.542502] env[69027]: DEBUG nova.compute.manager [req-9fbf3cd6-bcd4-42dc-94fc-c887b7663936 req-cb00cb51-21b7-493f-8f03-8faa2a53415f service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] No waiting events found dispatching network-vif-plugged-06b4aa22-a172-428a-8648-3a0c62009398 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 598.542751] env[69027]: WARNING nova.compute.manager [req-9fbf3cd6-bcd4-42dc-94fc-c887b7663936 req-cb00cb51-21b7-493f-8f03-8faa2a53415f service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Received unexpected event network-vif-plugged-06b4aa22-a172-428a-8648-3a0c62009398 for instance with vm_state building and task_state spawning. [ 598.808901] env[69027]: DEBUG nova.network.neutron [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Updated VIF entry in instance network info cache for port ef3495b9-d03f-4d09-b3cd-a4d666267c1a. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 598.809803] env[69027]: DEBUG nova.network.neutron [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Updating instance_info_cache with network_info: [{"id": "ef3495b9-d03f-4d09-b3cd-a4d666267c1a", "address": "fa:16:3e:08:7f:1a", "network": {"id": "69b7884b-5cab-46d9-8b36-f3724a130d68", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1324551187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "795a02370bc043c4a3c51663ce48a550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca50cd14-9e1f-4d74-a066-e5a45ba0ce22", "external-id": "nsx-vlan-transportzone-348", "segmentation_id": 348, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef3495b9-d0", "ovs_interfaceid": "ef3495b9-d03f-4d09-b3cd-a4d666267c1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.823505] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395073, 'name': CreateVM_Task, 'duration_secs': 0.309693} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.823505] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 598.823505] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.827224] env[69027]: DEBUG oslo_concurrency.lockutils [req-76279169-f5cf-4a04-9cf6-b114b2af3519 req-ba3ad042-bb97-48f9-9779-7402df4ab84a service nova] Releasing lock "refresh_cache-4ad409c8-465f-4106-946a-7f401358d5a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.890814] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.891250] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 598.891533] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.891782] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.892318] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 598.892404] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d7cd3ec-81d6-47c1-8dd0-43e85e571c10 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.899423] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Waiting for the task: (returnval){ [ 598.899423] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52987085-c25d-e10a-47ab-31ef5c0a4a69" [ 598.899423] env[69027]: _type = "Task" [ 598.899423] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.908399] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52987085-c25d-e10a-47ab-31ef5c0a4a69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.150405] env[69027]: DEBUG nova.compute.manager [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Received event network-changed-06b4aa22-a172-428a-8648-3a0c62009398 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 599.150828] env[69027]: DEBUG nova.compute.manager [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Refreshing instance network info cache due to event network-changed-06b4aa22-a172-428a-8648-3a0c62009398. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 599.151469] env[69027]: DEBUG oslo_concurrency.lockutils [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] Acquiring lock "refresh_cache-9ff1dac6-b328-42c3-babe-86aef27466c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.153310] env[69027]: DEBUG oslo_concurrency.lockutils [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] Acquired lock "refresh_cache-9ff1dac6-b328-42c3-babe-86aef27466c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.153310] env[69027]: DEBUG nova.network.neutron [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Refreshing network info cache for port 06b4aa22-a172-428a-8648-3a0c62009398 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 599.219836] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Updating instance_info_cache with network_info: [{"id": "7eb9f713-efec-4fc6-a124-93669bf3f39b", "address": "fa:16:3e:ed:54:0f", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eb9f713-ef", "ovs_interfaceid": "7eb9f713-efec-4fc6-a124-93669bf3f39b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.238847] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Releasing lock "refresh_cache-6ad953b9-4ded-42cd-86e0-2b1b707be4e4" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.238995] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Instance network_info: |[{"id": "7eb9f713-efec-4fc6-a124-93669bf3f39b", "address": "fa:16:3e:ed:54:0f", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eb9f713-ef", "ovs_interfaceid": "7eb9f713-efec-4fc6-a124-93669bf3f39b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 599.239435] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:54:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7eb9f713-efec-4fc6-a124-93669bf3f39b', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 599.249245] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Creating folder: Project (08b65d39d1924efc9d2dbd4ed09e43e7). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.249245] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dda6a811-0f45-4f4b-9beb-a7060d22f4a2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.260684] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Created folder: Project (08b65d39d1924efc9d2dbd4ed09e43e7) in parent group-v677321. [ 599.260852] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Creating folder: Instances. Parent ref: group-v677346. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 599.261064] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce10ef5a-a8a4-41e8-ba84-54208d5a3cb7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.276448] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Created folder: Instances in parent group-v677346. 
[ 599.276710] env[69027]: DEBUG oslo.service.loopingcall [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 599.276903] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 599.277143] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b361277-d6a8-42a1-814c-d05c57509d82 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.304338] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.304338] env[69027]: value = "task-3395076" [ 599.304338] env[69027]: _type = "Task" [ 599.304338] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.314707] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395076, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.414335] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.414506] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 599.414685] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.814166] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395076, 'name': CreateVM_Task, 'duration_secs': 0.362602} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.814572] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 599.815103] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 599.815221] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.815551] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 599.817895] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59d6168c-3c67-4b4e-9d0f-5a437e3b163a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.823480] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for the task: (returnval){ [ 599.823480] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]522bc0c6-7e1f-c2ea-47eb-b6a77d7d795d" [ 599.823480] env[69027]: _type = "Task" [ 599.823480] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.836692] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]522bc0c6-7e1f-c2ea-47eb-b6a77d7d795d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.245723] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Successfully updated port: b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 600.264626] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "refresh_cache-7c4204b8-2858-43a5-855d-c99b00e91d0d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.265094] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquired lock "refresh_cache-7c4204b8-2858-43a5-855d-c99b00e91d0d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.265094] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 600.336048] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.336371] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.337500] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 600.438440] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.438838] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.457858] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.515811] env[69027]: DEBUG nova.network.neutron [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Updated VIF entry in instance network info cache for port 06b4aa22-a172-428a-8648-3a0c62009398. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 600.516521] env[69027]: DEBUG nova.network.neutron [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Updating instance_info_cache with network_info: [{"id": "06b4aa22-a172-428a-8648-3a0c62009398", "address": "fa:16:3e:a9:16:e6", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06b4aa22-a1", "ovs_interfaceid": "06b4aa22-a172-428a-8648-3a0c62009398", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.526674] env[69027]: DEBUG oslo_concurrency.lockutils [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] Releasing lock "refresh_cache-9ff1dac6-b328-42c3-babe-86aef27466c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.526922] env[69027]: DEBUG nova.compute.manager [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Received event network-vif-plugged-7eb9f713-efec-4fc6-a124-93669bf3f39b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 600.527154] env[69027]: DEBUG oslo_concurrency.lockutils [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] Acquiring lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.527368] env[69027]: DEBUG oslo_concurrency.lockutils [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.527536] env[69027]: DEBUG oslo_concurrency.lockutils [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.527762] env[69027]: DEBUG nova.compute.manager [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] No waiting events found dispatching network-vif-plugged-7eb9f713-efec-4fc6-a124-93669bf3f39b {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 600.527951] env[69027]: WARNING nova.compute.manager [req-00eb1a1d-88ea-47cf-9c1d-21a312c1fe75 req-27b95bcf-d37f-427d-b6c0-970295512dcf service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Received unexpected event network-vif-plugged-7eb9f713-efec-4fc6-a124-93669bf3f39b for instance with vm_state building and task_state spawning. [ 600.532175] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.532418] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.190578] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Updating instance_info_cache with network_info: [{"id": "b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33", "address": "fa:16:3e:48:2f:d5", "network": {"id": "d922564e-f0b6-4239-af10-fe21a763ae8c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-569805545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a319a0627bf40138abe3c293c111dbb", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d26f9c-2c", "ovs_interfaceid": "b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.210909] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Releasing lock "refresh_cache-7c4204b8-2858-43a5-855d-c99b00e91d0d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 601.211582] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Instance network_info: |[{"id": "b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33", "address": "fa:16:3e:48:2f:d5", "network": {"id": "d922564e-f0b6-4239-af10-fe21a763ae8c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-569805545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a319a0627bf40138abe3c293c111dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d26f9c-2c", "ovs_interfaceid": "b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 601.212322] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:2f:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.224474] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating folder: Project (3a319a0627bf40138abe3c293c111dbb). Parent ref: group-v677321. 
{{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.224474] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad0af2f2-c7a0-410b-87d6-aa2c5cd25ef5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.236106] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Created folder: Project (3a319a0627bf40138abe3c293c111dbb) in parent group-v677321. [ 601.236106] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating folder: Instances. Parent ref: group-v677349. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 601.236106] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d9bfe1b-7f08-4a04-944c-5302624cdafa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.245123] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Created folder: Instances in parent group-v677349. [ 601.245395] env[69027]: DEBUG oslo.service.loopingcall [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 601.245588] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 601.245788] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcb47b8c-2b2f-494c-a1cb-a4f79992358a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.267625] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 601.267625] env[69027]: value = "task-3395079" [ 601.267625] env[69027]: _type = "Task" [ 601.267625] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.278878] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395079, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.751312] env[69027]: DEBUG nova.compute.manager [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Received event network-vif-plugged-1d8b922a-b400-466a-a5c8-05b027a503e5 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 601.756499] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Acquiring lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.756499] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.756499] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.756499] env[69027]: DEBUG nova.compute.manager [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] No waiting events found dispatching network-vif-plugged-1d8b922a-b400-466a-a5c8-05b027a503e5 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 601.756768] env[69027]: WARNING nova.compute.manager [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Received unexpected event network-vif-plugged-1d8b922a-b400-466a-a5c8-05b027a503e5 for instance with vm_state building and task_state spawning. [ 601.756768] env[69027]: DEBUG nova.compute.manager [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Received event network-changed-1d8b922a-b400-466a-a5c8-05b027a503e5 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 601.758319] env[69027]: DEBUG nova.compute.manager [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Refreshing instance network info cache due to event network-changed-1d8b922a-b400-466a-a5c8-05b027a503e5. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 601.758319] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Acquiring lock "refresh_cache-ae5e2ca1-75e2-4023-b297-4cc265f038e5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.758319] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Acquired lock "refresh_cache-ae5e2ca1-75e2-4023-b297-4cc265f038e5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.758319] env[69027]: DEBUG nova.network.neutron [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Refreshing network info cache for port 1d8b922a-b400-466a-a5c8-05b027a503e5 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 601.794426] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395079, 'name': CreateVM_Task, 'duration_secs': 0.325044} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.794426] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 601.794426] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.794426] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.794426] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 601.794705] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c77062a-4dd7-43b5-aa51-8ca75f74b7f4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.801332] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 601.801332] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52ed5787-f51f-487f-5e89-923688baad84" [ 601.801332] env[69027]: _type = "Task" [ 601.801332] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.814049] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52ed5787-f51f-487f-5e89-923688baad84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.320430] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.321082] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.321426] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.436954] env[69027]: DEBUG nova.network.neutron [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Updated VIF entry in instance network info cache for port 1d8b922a-b400-466a-a5c8-05b027a503e5. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 602.437356] env[69027]: DEBUG nova.network.neutron [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Updating instance_info_cache with network_info: [{"id": "1d8b922a-b400-466a-a5c8-05b027a503e5", "address": "fa:16:3e:43:8e:72", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d8b922a-b4", "ovs_interfaceid": "1d8b922a-b400-466a-a5c8-05b027a503e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.449456] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Releasing lock "refresh_cache-ae5e2ca1-75e2-4023-b297-4cc265f038e5" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 602.449456] env[69027]: DEBUG nova.compute.manager [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Received event network-changed-1c2fb916-4323-4348-8815-a12d0b82716b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 602.449456] env[69027]: DEBUG nova.compute.manager [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Refreshing instance network info cache due to event network-changed-1c2fb916-4323-4348-8815-a12d0b82716b. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 602.449709] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Acquiring lock "refresh_cache-aec054f1-0d52-49be-9dee-8db0ae362f12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.449814] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Acquired lock "refresh_cache-aec054f1-0d52-49be-9dee-8db0ae362f12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.450151] env[69027]: DEBUG nova.network.neutron [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Refreshing network info cache for port 1c2fb916-4323-4348-8815-a12d0b82716b {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 602.708701] env[69027]: DEBUG nova.compute.manager [req-9dcbafa5-1d21-4c03-abaa-8034800c61c3 req-a87aee16-c803-487c-a732-b19dbac7d6cc service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Received event network-vif-plugged-1c2fb916-4323-4348-8815-a12d0b82716b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 602.708935] env[69027]: DEBUG oslo_concurrency.lockutils [req-9dcbafa5-1d21-4c03-abaa-8034800c61c3 req-a87aee16-c803-487c-a732-b19dbac7d6cc service nova] Acquiring lock "aec054f1-0d52-49be-9dee-8db0ae362f12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.709161] env[69027]: DEBUG oslo_concurrency.lockutils [req-9dcbafa5-1d21-4c03-abaa-8034800c61c3 req-a87aee16-c803-487c-a732-b19dbac7d6cc service nova] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.709333] env[69027]: DEBUG oslo_concurrency.lockutils [req-9dcbafa5-1d21-4c03-abaa-8034800c61c3 req-a87aee16-c803-487c-a732-b19dbac7d6cc service nova] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.709499] env[69027]: DEBUG nova.compute.manager [req-9dcbafa5-1d21-4c03-abaa-8034800c61c3 req-a87aee16-c803-487c-a732-b19dbac7d6cc service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] No waiting events found dispatching network-vif-plugged-1c2fb916-4323-4348-8815-a12d0b82716b {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 602.709668] env[69027]: WARNING nova.compute.manager [req-9dcbafa5-1d21-4c03-abaa-8034800c61c3 req-a87aee16-c803-487c-a732-b19dbac7d6cc service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Received unexpected event network-vif-plugged-1c2fb916-4323-4348-8815-a12d0b82716b for instance with vm_state building and task_state spawning. 
[ 602.767140] env[69027]: DEBUG nova.compute.manager [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Received event network-changed-7eb9f713-efec-4fc6-a124-93669bf3f39b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 602.767140] env[69027]: DEBUG nova.compute.manager [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Refreshing instance network info cache due to event network-changed-7eb9f713-efec-4fc6-a124-93669bf3f39b. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 602.767140] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Acquiring lock "refresh_cache-6ad953b9-4ded-42cd-86e0-2b1b707be4e4" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.769396] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Acquired lock "refresh_cache-6ad953b9-4ded-42cd-86e0-2b1b707be4e4" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.770942] env[69027]: DEBUG nova.network.neutron [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Refreshing network info cache for port 7eb9f713-efec-4fc6-a124-93669bf3f39b {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 603.058651] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c6c11a51-8958-4c94-9726-f5611cd3f0ce tempest-ServerDiagnosticsV248Test-1395699001 tempest-ServerDiagnosticsV248Test-1395699001-project-member] Acquiring lock "a00267a5-aa1c-434f-8201-157481bc0801" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.058651] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c6c11a51-8958-4c94-9726-f5611cd3f0ce tempest-ServerDiagnosticsV248Test-1395699001 tempest-ServerDiagnosticsV248Test-1395699001-project-member] Lock "a00267a5-aa1c-434f-8201-157481bc0801" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.072433] env[69027]: DEBUG nova.network.neutron [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Updated VIF entry in instance network info cache for port 1c2fb916-4323-4348-8815-a12d0b82716b. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 603.074858] env[69027]: DEBUG nova.network.neutron [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Updating instance_info_cache with network_info: [{"id": "1c2fb916-4323-4348-8815-a12d0b82716b", "address": "fa:16:3e:c4:af:83", "network": {"id": "e4e3b623-8795-444a-90e1-6f9b5834cccb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-2073664562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0edf1fbe4d4a438db0a192b02dbbdddd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73f6629b-7f80-4a5b-8f15-c7a1635b3c33", "external-id": "nsx-vlan-transportzone-481", "segmentation_id": 481, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c2fb916-43", "ovs_interfaceid": "1c2fb916-4323-4348-8815-a12d0b82716b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.087560] env[69027]: DEBUG oslo_concurrency.lockutils [req-faf20fe2-512c-4a0d-9dbe-a2782e3f42dc req-2fcb4c28-6a3d-4296-9e39-8fd58b489151 service nova] Releasing lock "refresh_cache-aec054f1-0d52-49be-9dee-8db0ae362f12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.373041] env[69027]: DEBUG nova.network.neutron [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Updated VIF entry in instance network info cache for port 7eb9f713-efec-4fc6-a124-93669bf3f39b. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 603.373041] env[69027]: DEBUG nova.network.neutron [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Updating instance_info_cache with network_info: [{"id": "7eb9f713-efec-4fc6-a124-93669bf3f39b", "address": "fa:16:3e:ed:54:0f", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.45", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eb9f713-ef", "ovs_interfaceid": "7eb9f713-efec-4fc6-a124-93669bf3f39b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.398378] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Releasing lock "refresh_cache-6ad953b9-4ded-42cd-86e0-2b1b707be4e4" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.399028] env[69027]: DEBUG nova.compute.manager [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Received event network-vif-plugged-b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 603.399028] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Acquiring lock "7c4204b8-2858-43a5-855d-c99b00e91d0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.399157] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.399556] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.399556] env[69027]: DEBUG nova.compute.manager [req-5c2bceb1-9d00-41dd-9155-434df678ddcf 
req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] No waiting events found dispatching network-vif-plugged-b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 603.400047] env[69027]: WARNING nova.compute.manager [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Received unexpected event network-vif-plugged-b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33 for instance with vm_state building and task_state spawning. [ 603.400047] env[69027]: DEBUG nova.compute.manager [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Received event network-changed-b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 603.400200] env[69027]: DEBUG nova.compute.manager [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Refreshing instance network info cache due to event network-changed-b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 603.400461] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Acquiring lock "refresh_cache-7c4204b8-2858-43a5-855d-c99b00e91d0d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.400629] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Acquired lock "refresh_cache-7c4204b8-2858-43a5-855d-c99b00e91d0d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.400818] env[69027]: DEBUG nova.network.neutron [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Refreshing network info cache for port b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 604.160655] env[69027]: DEBUG nova.network.neutron [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Updated VIF entry in instance network info cache for port b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 604.160996] env[69027]: DEBUG nova.network.neutron [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Updating instance_info_cache with network_info: [{"id": "b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33", "address": "fa:16:3e:48:2f:d5", "network": {"id": "d922564e-f0b6-4239-af10-fe21a763ae8c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-569805545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a319a0627bf40138abe3c293c111dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d26f9c-2c", "ovs_interfaceid": "b9d26f9c-2c3a-4faa-8c76-1866b8d6fe33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.173885] env[69027]: DEBUG oslo_concurrency.lockutils [req-5c2bceb1-9d00-41dd-9155-434df678ddcf req-e0aedcf7-d5b7-44b9-8ebc-f2807b7a39d9 service nova] Releasing lock "refresh_cache-7c4204b8-2858-43a5-855d-c99b00e91d0d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.590235] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fd201bed-41c6-4137-a3b5-1086d66f4fd2 tempest-AttachInterfacesV270Test-947417479 tempest-AttachInterfacesV270Test-947417479-project-member] Acquiring lock "3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.590565] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fd201bed-41c6-4137-a3b5-1086d66f4fd2 tempest-AttachInterfacesV270Test-947417479 tempest-AttachInterfacesV270Test-947417479-project-member] Lock "3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.638542] env[69027]: DEBUG oslo_concurrency.lockutils [None req-544bbf2b-93d0-4ee6-8511-b0baf9535b3d tempest-ServersWithSpecificFlavorTestJSON-443080628 tempest-ServersWithSpecificFlavorTestJSON-443080628-project-member] Acquiring lock "45dfaf74-5be6-4c63-9efc-4717d12e3d2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.638542] env[69027]: DEBUG oslo_concurrency.lockutils [None req-544bbf2b-93d0-4ee6-8511-b0baf9535b3d tempest-ServersWithSpecificFlavorTestJSON-443080628 
tempest-ServersWithSpecificFlavorTestJSON-443080628-project-member] Lock "45dfaf74-5be6-4c63-9efc-4717d12e3d2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.607319] env[69027]: DEBUG oslo_concurrency.lockutils [None req-98532783-bb57-4e53-a10a-a7b3fe7028b4 tempest-VolumesAssistedSnapshotsTest-380461100 tempest-VolumesAssistedSnapshotsTest-380461100-project-member] Acquiring lock "b2b068c1-defd-4ee8-8611-9016924fc223" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 610.607674] env[69027]: DEBUG oslo_concurrency.lockutils [None req-98532783-bb57-4e53-a10a-a7b3fe7028b4 tempest-VolumesAssistedSnapshotsTest-380461100 tempest-VolumesAssistedSnapshotsTest-380461100-project-member] Lock "b2b068c1-defd-4ee8-8611-9016924fc223" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 611.182406] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8a80f6cf-1a19-472a-bc00-f9df7b957f08 tempest-ServersV294TestFqdnHostnames-190221613 tempest-ServersV294TestFqdnHostnames-190221613-project-member] Acquiring lock "40937830-3e23-49ff-aeb6-cdd6b62b0614" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.182406] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8a80f6cf-1a19-472a-bc00-f9df7b957f08 tempest-ServersV294TestFqdnHostnames-190221613 tempest-ServersV294TestFqdnHostnames-190221613-project-member] Lock "40937830-3e23-49ff-aeb6-cdd6b62b0614" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.401137] env[69027]: DEBUG oslo_concurrency.lockutils [None req-34b0117f-f6c1-429b-a72a-f7137c70d885 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "c2497714-9a08-4ab0-9371-33060724f9d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.401452] env[69027]: DEBUG oslo_concurrency.lockutils [None req-34b0117f-f6c1-429b-a72a-f7137c70d885 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c2497714-9a08-4ab0-9371-33060724f9d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.449281] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9647e20c-5932-4cf2-b8fe-0eb20e9869d2 tempest-ServerGroupTestJSON-1038527388 tempest-ServerGroupTestJSON-1038527388-project-member] Acquiring lock "7d2205d5-ed82-49cc-960b-b9da9584144a" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.449573] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9647e20c-5932-4cf2-b8fe-0eb20e9869d2 tempest-ServerGroupTestJSON-1038527388 tempest-ServerGroupTestJSON-1038527388-project-member] Lock "7d2205d5-ed82-49cc-960b-b9da9584144a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.911285] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a7b922fd-bb77-4038-b04c-02e33cca7cd9 tempest-InstanceActionsTestJSON-1423893352 tempest-InstanceActionsTestJSON-1423893352-project-member] Acquiring lock "ded28a13-a8c2-45aa-978e-2a1de389d958" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.911399] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a7b922fd-bb77-4038-b04c-02e33cca7cd9 tempest-InstanceActionsTestJSON-1423893352 tempest-InstanceActionsTestJSON-1423893352-project-member] Lock "ded28a13-a8c2-45aa-978e-2a1de389d958" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.118937] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f2e3a613-da1c-48fd-87b3-3a9b13573913 tempest-ServerActionsTestJSON-803946511 tempest-ServerActionsTestJSON-803946511-project-member] Acquiring lock "db769146-b610-4d0a-8329-b977d8450a27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.121345] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f2e3a613-da1c-48fd-87b3-3a9b13573913 tempest-ServerActionsTestJSON-803946511 tempest-ServerActionsTestJSON-803946511-project-member] Lock "db769146-b610-4d0a-8329-b977d8450a27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.238832] env[69027]: WARNING oslo_vmware.rw_handles [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 622.238832] 
env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 622.238832] env[69027]: ERROR oslo_vmware.rw_handles [ 622.238832] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 622.239485] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 622.239629] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Copying Virtual Disk [datastore2] vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/45c407b6-4051-45bf-a54c-37e059c7984b/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 622.242126] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbf174c8-adcd-4cf3-8229-64ddeaba5f54 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.249819] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Waiting for the task: (returnval){ [ 622.249819] env[69027]: value = "task-3395080" [ 622.249819] env[69027]: _type = "Task" [ 622.249819] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.259870] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Task: {'id': task-3395080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.767851] env[69027]: DEBUG oslo_vmware.exceptions [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 622.767851] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.772382] env[69027]: ERROR nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 622.772382] env[69027]: Faults: ['InvalidArgument'] [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] Traceback (most recent call last): [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] yield resources [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self.driver.spawn(context, instance, image_meta, [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self._fetch_image_if_missing(context, vi) [ 622.772382] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] image_cache(vi, tmp_image_ds_loc) [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] vm_util.copy_virtual_disk( [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] session._wait_for_task(vmdk_copy_task) [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] return self.wait_for_task(task_ref) [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] return evt.wait() [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] result = hub.switch() [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 622.773897] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] return self.greenlet.switch() [ 622.775684] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 622.775684] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self.f(*self.args, **self.kw) [ 622.775684] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 622.775684] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] raise exceptions.translate_fault(task_info.error) [ 622.775684] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 622.775684] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] Faults: ['InvalidArgument'] [ 622.775684] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] [ 622.775684] env[69027]: INFO nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Terminating instance [ 622.776993] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.777418] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.778697] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquiring lock 
"refresh_cache-f719053e-2753-49c6-b47d-5adb698cafac" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.778697] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquired lock "refresh_cache-f719053e-2753-49c6-b47d-5adb698cafac" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.781846] env[69027]: DEBUG nova.network.neutron [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.781846] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e16fe2a3-66a4-443b-87e5-4939d1f0d8b4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.790589] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.790824] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 622.791554] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa437490-f21f-496e-bb75-6788b3a532a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.808626] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Waiting for the task: (returnval){ [ 622.808626] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5241da86-3e55-44fc-9740-19a4558df8a1" [ 622.808626] env[69027]: _type = "Task" [ 622.808626] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.820468] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5241da86-3e55-44fc-9740-19a4558df8a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.860031] env[69027]: DEBUG nova.network.neutron [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.129123] env[69027]: DEBUG nova.network.neutron [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.142377] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Releasing lock "refresh_cache-f719053e-2753-49c6-b47d-5adb698cafac" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.143070] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 623.145055] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 623.145055] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd223c4-9d52-4fa6-af1c-f1913d7f97d4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.157428] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 623.158108] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f52c6b4-c8ae-4aa7-ae6c-52c8d493a257 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.189694] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 623.191097] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 623.191097] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Deleting the datastore file [datastore2] f719053e-2753-49c6-b47d-5adb698cafac {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
623.191097] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03e8eb42-f138-4a6d-9afd-eec76c5e6454 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.200774] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Waiting for the task: (returnval){ [ 623.200774] env[69027]: value = "task-3395082" [ 623.200774] env[69027]: _type = "Task" [ 623.200774] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.213244] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Task: {'id': task-3395082, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.226202] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0f3af6e4-7f5d-4508-82e8-78d67eed5f9d tempest-ImagesNegativeTestJSON-1075311325 tempest-ImagesNegativeTestJSON-1075311325-project-member] Acquiring lock "0270a5ec-007f-4f7a-a6c6-05163c052452" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.226434] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0f3af6e4-7f5d-4508-82e8-78d67eed5f9d tempest-ImagesNegativeTestJSON-1075311325 tempest-ImagesNegativeTestJSON-1075311325-project-member] Lock "0270a5ec-007f-4f7a-a6c6-05163c052452" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.320659] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 623.320920] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Creating directory with path [datastore2] vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.321756] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-149c8887-7da4-45c0-bac4-8569959e0e82 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.334031] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Created directory with path [datastore2] vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.334251] env[69027]: DEBUG 
nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Fetch image to [datastore2] vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 623.335033] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 623.335249] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3343b90d-54f3-4f4f-9558-8267a8ac3b79 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.346437] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ede178-5e66-4345-a5d6-cadbed9c47ab {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.359931] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d2ed5a-c63f-46e1-ad5b-896d33951984 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.400228] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85f19ab-b274-4252-a6e4-e487d22a5885 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.405911] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-82024186-dd76-43a6-ac7a-4cd0f8461c02 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.497202] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 623.574977] env[69027]: DEBUG oslo_vmware.rw_handles [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 623.636518] env[69027]: DEBUG oslo_vmware.rw_handles [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 623.636518] env[69027]: DEBUG oslo_vmware.rw_handles [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 623.715303] env[69027]: DEBUG oslo_vmware.api [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Task: {'id': task-3395082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.049079} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.715562] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.715742] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 623.715910] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 623.716380] env[69027]: INFO nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Took 0.57 seconds to destroy the instance on the hypervisor. [ 623.716635] env[69027]: DEBUG oslo.service.loopingcall [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 623.716873] env[69027]: DEBUG nova.compute.manager [-] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 623.719793] env[69027]: DEBUG nova.compute.claims [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 623.720794] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.720794] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.239771] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0661b7dd-fd60-4b9a-9ed9-1e27c3af4b74 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.249596] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1928fcfc-099a-443b-b08c-eb3ac964a6e8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.285211] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7892edf8-53a2-4319-9e78-19b26d1f5985 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.293017] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0c0793-c5bb-4fc8-b49c-acd5ee0681ef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.306997] env[69027]: DEBUG nova.compute.provider_tree [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.318058] env[69027]: DEBUG nova.scheduler.client.report [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 624.344050] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.624s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.344590] env[69027]: ERROR nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 624.344590] env[69027]: Faults: ['InvalidArgument'] [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] Traceback (most recent call last): [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self.driver.spawn(context, instance, image_meta, [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self._fetch_image_if_missing(context, vi) [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] image_cache(vi, tmp_image_ds_loc) [ 624.344590] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] vm_util.copy_virtual_disk( [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] session._wait_for_task(vmdk_copy_task) [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] return self.wait_for_task(task_ref) [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] return evt.wait() [ 
624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] result = hub.switch() [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] return self.greenlet.switch() [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 624.344945] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] self.f(*self.args, **self.kw) [ 624.345394] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 624.345394] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] raise exceptions.translate_fault(task_info.error) [ 624.345394] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 624.345394] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] Faults: ['InvalidArgument'] [ 624.345394] env[69027]: ERROR nova.compute.manager [instance: f719053e-2753-49c6-b47d-5adb698cafac] [ 624.345394] env[69027]: DEBUG nova.compute.utils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 624.354732] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Build of instance f719053e-2753-49c6-b47d-5adb698cafac was re-scheduled: A specified parameter was not correct: fileType [ 624.354732] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 624.355410] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 624.355758] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Acquiring lock "refresh_cache-f719053e-2753-49c6-b47d-5adb698cafac" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.356186] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 
tempest-ServersAdmin275Test-2120331507-project-member] Acquired lock "refresh_cache-f719053e-2753-49c6-b47d-5adb698cafac" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.357358] env[69027]: DEBUG nova.network.neutron [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 624.422549] env[69027]: DEBUG nova.network.neutron [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.630175] env[69027]: DEBUG nova.network.neutron [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.646138] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Releasing lock "refresh_cache-f719053e-2753-49c6-b47d-5adb698cafac" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.646138] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 624.646138] env[69027]: DEBUG nova.compute.manager [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] [instance: f719053e-2753-49c6-b47d-5adb698cafac] Skipping network deallocation for instance since networking was not requested. {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 624.789639] env[69027]: INFO nova.scheduler.client.report [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Deleted allocations for instance f719053e-2753-49c6-b47d-5adb698cafac [ 624.826812] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f1b86809-b83b-4a6e-8390-afed9d823704 tempest-ServersAdmin275Test-2120331507 tempest-ServersAdmin275Test-2120331507-project-member] Lock "f719053e-2753-49c6-b47d-5adb698cafac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.242s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.884786] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 624.965152] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.965412] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.966983] env[69027]: INFO nova.compute.claims [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.207865] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Acquiring lock "0522ed67-caaf-4018-b35f-252a6bbd2644" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.208159] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Lock "0522ed67-caaf-4018-b35f-252a6bbd2644" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.244386] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Acquiring lock "d14a7134-97a4-47e1-a49c-84e0189ecfec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.245208] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Lock "d14a7134-97a4-47e1-a49c-84e0189ecfec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.294678] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Acquiring lock "d2ae8743-b98e-403e-acc3-1be0eda5825b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.296232] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Lock "d2ae8743-b98e-403e-acc3-1be0eda5825b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.455909] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10dc8e9-5da3-428c-aefa-ac9ef52888e4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.464230] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9087b044-dfa9-4555-a565-80a64fb90d26 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.507789] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73740bb1-b6ec-448a-9a0b-4080ccc8a1d4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.515483] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8daa2b1-4642-4495-b0c9-bda5328ed9b6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.531153] env[69027]: DEBUG nova.compute.provider_tree [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.543112] env[69027]: DEBUG nova.scheduler.client.report [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 625.562415] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.597s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.564220] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Start building networks asynchronously 
for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 625.617159] env[69027]: DEBUG nova.compute.utils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.618189] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 625.618502] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 625.633667] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 625.729815] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 625.760252] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 625.760252] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 625.760252] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 625.760468] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 625.760622] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 625.760773] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 625.760983] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 625.761180] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 625.761350] 
env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 625.761517] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 625.761702] env[69027]: DEBUG nova.virt.hardware [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 625.762603] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffc00da-accb-4b7e-81e9-a75ce8c1b18c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.774100] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25a29a4-6434-432b-8fd0-2d7571328eae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.837673] env[69027]: DEBUG nova.policy [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd31e55f937a84ec1a8868ccd1cfc6c6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9dec99d89ef4ee3a43b5242fced36a8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 626.999019] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Successfully created port: f249f646-24be-47b2-a22e-f5c48f7b8b43 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 628.734841] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Successfully updated port: f249f646-24be-47b2-a22e-f5c48f7b8b43 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 628.753179] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "refresh_cache-339bab90-238a-47ab-89f5-1ff9541ec14d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.753596] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "refresh_cache-339bab90-238a-47ab-89f5-1ff9541ec14d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.753596] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.909552] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.163602] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquiring lock "887fbbeb-c981-4cc3-94e6-c232774507c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.163859] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "887fbbeb-c981-4cc3-94e6-c232774507c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.395739] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Updating instance_info_cache with network_info: [{"id": "f249f646-24be-47b2-a22e-f5c48f7b8b43", "address": "fa:16:3e:f2:29:94", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf249f646-24", "ovs_interfaceid": "f249f646-24be-47b2-a22e-f5c48f7b8b43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 629.413363] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "refresh_cache-339bab90-238a-47ab-89f5-1ff9541ec14d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.413432] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Instance network_info: |[{"id": "f249f646-24be-47b2-a22e-f5c48f7b8b43", "address": "fa:16:3e:f2:29:94", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf249f646-24", "ovs_interfaceid": "f249f646-24be-47b2-a22e-f5c48f7b8b43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 629.413874] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:29:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f249f646-24be-47b2-a22e-f5c48f7b8b43', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 629.429384] env[69027]: DEBUG oslo.service.loopingcall [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 629.431122] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 629.431122] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40f8d52c-bc20-4c7e-b610-6f36ac42378b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.463959] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 629.463959] env[69027]: value = "task-3395083" [ 629.463959] env[69027]: _type = "Task" [ 629.463959] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.474830] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395083, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.898704] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f0a8815b-8f4c-415f-8508-3ef4c6746492 tempest-ServerShowV257Test-1857541781 tempest-ServerShowV257Test-1857541781-project-member] Acquiring lock "8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.898991] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f0a8815b-8f4c-415f-8508-3ef4c6746492 tempest-ServerShowV257Test-1857541781 tempest-ServerShowV257Test-1857541781-project-member] Lock "8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.974830] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395083, 'name': CreateVM_Task, 'duration_secs': 0.294532} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.974830] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 629.975985] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.975985] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.975985] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 629.975985] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5507fe53-ffcc-484f-92c2-68a2ac38028c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.981034] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 629.981034] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52999a7b-33bc-b11c-d5fd-6703c2bdc79c" [ 629.981034] env[69027]: _type = "Task" [ 629.981034] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.988595] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52999a7b-33bc-b11c-d5fd-6703c2bdc79c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.064625] env[69027]: DEBUG nova.compute.manager [req-a930887a-ba2b-4c5f-a850-5531ef7df9d1 req-2f181ada-d73c-46b0-a666-e2cacd9f1045 service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Received event network-vif-plugged-f249f646-24be-47b2-a22e-f5c48f7b8b43 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 630.064625] env[69027]: DEBUG oslo_concurrency.lockutils [req-a930887a-ba2b-4c5f-a850-5531ef7df9d1 req-2f181ada-d73c-46b0-a666-e2cacd9f1045 service nova] Acquiring lock "339bab90-238a-47ab-89f5-1ff9541ec14d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.064625] env[69027]: DEBUG oslo_concurrency.lockutils [req-a930887a-ba2b-4c5f-a850-5531ef7df9d1 req-2f181ada-d73c-46b0-a666-e2cacd9f1045 service nova] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 630.064625] env[69027]: DEBUG oslo_concurrency.lockutils [req-a930887a-ba2b-4c5f-a850-5531ef7df9d1 req-2f181ada-d73c-46b0-a666-e2cacd9f1045 service nova] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.065134] env[69027]: DEBUG nova.compute.manager [req-a930887a-ba2b-4c5f-a850-5531ef7df9d1 req-2f181ada-d73c-46b0-a666-e2cacd9f1045 service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] No waiting events found dispatching network-vif-plugged-f249f646-24be-47b2-a22e-f5c48f7b8b43 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 630.065134] env[69027]: WARNING nova.compute.manager [req-a930887a-ba2b-4c5f-a850-5531ef7df9d1 req-2f181ada-d73c-46b0-a666-e2cacd9f1045 service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Received unexpected event network-vif-plugged-f249f646-24be-47b2-a22e-f5c48f7b8b43 for instance with vm_state building and task_state spawning. 
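Editor's note, not part of the log: the CreateVM_Task entries above follow oslo.vmware's generic invoke-then-poll pattern. The driver issues the asynchronous vSphere task through the API session, then blocks on the session until vCenter reports the final TaskInfo state; that polling is what produces the paired "Waiting for the task ... to complete", "progress is 0%", and "completed successfully" DEBUG lines. A minimal Python sketch of that pattern, assuming an already-authenticated oslo_vmware.api.VMwareAPISession in `session` and pre-built folder / config-spec / resource-pool references (the helper name and argument names are illustrative, not Nova's exact code):

    from oslo_vmware import exceptions as vexc

    def create_vm_and_wait(session, folder_ref, config_spec, res_pool_ref):
        # Start the asynchronous vSphere task; this returns a Task moref
        # immediately (logged above as "Invoking Folder.CreateVM_Task ...").
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                      folder_ref,
                                      config=config_spec,
                                      pool=res_pool_ref)
        try:
            # Poll TaskInfo until the task finishes; this is the step that
            # emits the "Waiting for the task ... to complete" and
            # "completed successfully" lines seen in the log.
            task_info = session.wait_for_task(task_ref)
        except vexc.VimFaultException:
            # A task fault such as InvalidArgument is translated into an
            # exception, as in the copy_virtual_disk traceback earlier.
            raise
        return task_info.result  # moref of the newly created VirtualMachine

Per the source locations in the log itself, Nova wraps these same two steps in nova.virt.vmwareapi.vm_util.create_vm and VMwareAPISession._wait_for_task rather than calling the session directly.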
[ 630.492085] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.492344] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.492550] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.191038] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fe2ba5b9-0e5c-420f-b361-523d31e16a2d tempest-ImagesOneServerTestJSON-251492339 tempest-ImagesOneServerTestJSON-251492339-project-member] Acquiring lock "918f38e1-e1b4-40d7-a79c-49257a814941" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.191038] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fe2ba5b9-0e5c-420f-b361-523d31e16a2d tempest-ImagesOneServerTestJSON-251492339 tempest-ImagesOneServerTestJSON-251492339-project-member] Lock "918f38e1-e1b4-40d7-a79c-49257a814941" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.360394] env[69027]: DEBUG oslo_concurrency.lockutils [None req-361b5fc3-c112-431b-8930-ecb8c4a23db8 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006-project-member] Acquiring lock "5bdb4d11-9125-4c28-89b0-8fd9147e0c0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.360598] env[69027]: DEBUG oslo_concurrency.lockutils [None req-361b5fc3-c112-431b-8930-ecb8c4a23db8 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006-project-member] Lock "5bdb4d11-9125-4c28-89b0-8fd9147e0c0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.125607] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquiring lock "4927121e-a71e-47e9-9475-603096e82492" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.125851] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "4927121e-a71e-47e9-9475-603096e82492" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.796030] env[69027]: DEBUG nova.compute.manager [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Received event network-changed-f249f646-24be-47b2-a22e-f5c48f7b8b43 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 632.796950] env[69027]: DEBUG nova.compute.manager [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Refreshing instance network info cache due to event network-changed-f249f646-24be-47b2-a22e-f5c48f7b8b43. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 632.797131] env[69027]: DEBUG oslo_concurrency.lockutils [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] Acquiring lock "refresh_cache-339bab90-238a-47ab-89f5-1ff9541ec14d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.797302] env[69027]: DEBUG oslo_concurrency.lockutils [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] Acquired lock "refresh_cache-339bab90-238a-47ab-89f5-1ff9541ec14d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.797506] env[69027]: DEBUG nova.network.neutron [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Refreshing network info cache for port f249f646-24be-47b2-a22e-f5c48f7b8b43 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 633.222574] env[69027]: DEBUG nova.network.neutron [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Updated VIF entry in instance network info cache for port f249f646-24be-47b2-a22e-f5c48f7b8b43. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 633.222574] env[69027]: DEBUG nova.network.neutron [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Updating instance_info_cache with network_info: [{"id": "f249f646-24be-47b2-a22e-f5c48f7b8b43", "address": "fa:16:3e:f2:29:94", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf249f646-24", "ovs_interfaceid": "f249f646-24be-47b2-a22e-f5c48f7b8b43", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.235302] env[69027]: DEBUG oslo_concurrency.lockutils [req-b31e26e8-5a36-401e-8520-20ac1b83d728 req-a147c204-3530-45f6-81a4-ed7ed89099bb service nova] Releasing lock "refresh_cache-339bab90-238a-47ab-89f5-1ff9541ec14d" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.259838] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.259838] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.868794] env[69027]: DEBUG oslo_concurrency.lockutils [None req-34f42b96-9e7c-4dcd-a599-54b301eafc39 tempest-AttachInterfacesUnderV243Test-492310881 tempest-AttachInterfacesUnderV243Test-492310881-project-member] Acquiring lock "fbc37887-513b-4b67-915f-e6862ea585e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.869274] env[69027]: DEBUG oslo_concurrency.lockutils [None req-34f42b96-9e7c-4dcd-a599-54b301eafc39 tempest-AttachInterfacesUnderV243Test-492310881 
tempest-AttachInterfacesUnderV243Test-492310881-project-member] Lock "fbc37887-513b-4b67-915f-e6862ea585e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.427149] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aebe1c2a-11fc-4553-babd-236a00a537ff tempest-ServerDiagnosticsNegativeTest-1687517774 tempest-ServerDiagnosticsNegativeTest-1687517774-project-member] Acquiring lock "c42ce6eb-a29d-4e16-b5e6-ee507bd58819" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.427395] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aebe1c2a-11fc-4553-babd-236a00a537ff tempest-ServerDiagnosticsNegativeTest-1687517774 tempest-ServerDiagnosticsNegativeTest-1687517774-project-member] Lock "c42ce6eb-a29d-4e16-b5e6-ee507bd58819" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.492127] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c7f503b2-bba0-4290-aab2-dab356bbd5e2 tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Acquiring lock "59c731c3-8604-481b-b761-29a5251411f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.492387] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c7f503b2-bba0-4290-aab2-dab356bbd5e2 tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Lock "59c731c3-8604-481b-b761-29a5251411f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.182406] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 648.182702] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 648.206624] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 648.206808] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 648.207016] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 648.217819] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.218054] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.218230] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.218398] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 648.219478] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0840376-93f9-4d15-9394-0ed22f6e9ba8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.228545] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6762a1-271d-40fa-9821-d74d5d030fde {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.242694] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3efd6eb-99ee-498d-98a6-b3148c2edca3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.249240] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177ee0f2-b5a5-49ff-bd11-93ee9b8a2773 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.277626] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180954MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 648.277799] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.277999] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.350843] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 657170ea-02c4-4bc9-97d0-9aa3960fcaf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351010] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a26389e3-7c20-4227-bd57-9d058964edc5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351147] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 2e16f12d-194a-47a5-824d-062a684a86f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351269] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ad409c8-465f-4106-946a-7f401358d5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351388] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ae5e2ca1-75e2-4023-b297-4cc265f038e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351504] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ff1dac6-b328-42c3-babe-86aef27466c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351618] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance aec054f1-0d52-49be-9dee-8db0ae362f12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351731] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351840] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.351944] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 648.376476] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.401995] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.426996] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.437521] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a00267a5-aa1c-434f-8201-157481bc0801 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.447771] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.459805] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 45dfaf74-5be6-4c63-9efc-4717d12e3d2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.469341] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b2b068c1-defd-4ee8-8611-9016924fc223 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.478929] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 40937830-3e23-49ff-aeb6-cdd6b62b0614 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.489685] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c2497714-9a08-4ab0-9371-33060724f9d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.501124] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7d2205d5-ed82-49cc-960b-b9da9584144a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.511942] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ded28a13-a8c2-45aa-978e-2a1de389d958 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.522903] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance db769146-b610-4d0a-8329-b977d8450a27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.533821] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 0270a5ec-007f-4f7a-a6c6-05163c052452 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.543673] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 0522ed67-caaf-4018-b35f-252a6bbd2644 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.553507] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d14a7134-97a4-47e1-a49c-84e0189ecfec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.565653] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d2ae8743-b98e-403e-acc3-1be0eda5825b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.578732] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 887fbbeb-c981-4cc3-94e6-c232774507c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.591885] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.606898] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 918f38e1-e1b4-40d7-a79c-49257a814941 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.618321] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 5bdb4d11-9125-4c28-89b0-8fd9147e0c0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.632053] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4927121e-a71e-47e9-9475-603096e82492 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.643943] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.655037] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbc37887-513b-4b67-915f-e6862ea585e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.666312] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c42ce6eb-a29d-4e16-b5e6-ee507bd58819 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.678227] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 59c731c3-8604-481b-b761-29a5251411f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 648.678516] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 648.678723] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 649.127494] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828612d5-742c-409f-a59a-6aaf89921d63 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.135183] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bea993-2ff8-489d-947c-365e02275321 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.167197] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edfd679-35a9-4c2c-9129-5b85360a5d22 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.175255] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a462bb-e4a7-4df6-bdbe-3b7d70325a1e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.188868] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.197651] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 649.214763] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 649.214958] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.937s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.779673] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.779920] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 649.781029] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 649.815593] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.815900] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816102] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816244] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816372] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816495] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816616] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816735] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816858] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.816976] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 649.817111] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 649.818034] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.818034] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.818156] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.818309] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 649.818443] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 656.791891] env[69027]: DEBUG oslo_concurrency.lockutils [None req-977ecc39-201b-4b24-a71f-65988694224c tempest-ServersTestBootFromVolume-666257734 tempest-ServersTestBootFromVolume-666257734-project-member] Acquiring lock "caee2e65-98e5-424d-8dd8-057732b921fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.792172] env[69027]: DEBUG oslo_concurrency.lockutils [None req-977ecc39-201b-4b24-a71f-65988694224c tempest-ServersTestBootFromVolume-666257734 tempest-ServersTestBootFromVolume-666257734-project-member] Lock "caee2e65-98e5-424d-8dd8-057732b921fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.895206] env[69027]: WARNING oslo_vmware.rw_handles [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 672.895206] env[69027]: ERROR oslo_vmware.rw_handles [ 672.895690] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 672.897517] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 672.897869] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Copying Virtual Disk [datastore2] vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/0d5a6b98-c0a6-4aaa-923c-ed7ef976d00c/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 672.898229] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3cd65ea-ff41-45a1-bcad-600cff6ca2a5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.907349] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Waiting for the task: (returnval){ [ 672.907349] env[69027]: value = "task-3395095" [ 672.907349] env[69027]: _type = "Task" [ 672.907349] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.915327] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Task: {'id': task-3395095, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.419813] env[69027]: DEBUG oslo_vmware.exceptions [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 673.420187] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.420683] env[69027]: ERROR nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 673.420683] env[69027]: Faults: ['InvalidArgument'] [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Traceback (most recent call last): [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] yield resources [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self.driver.spawn(context, instance, image_meta, [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self._fetch_image_if_missing(context, vi) [ 673.420683] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] image_cache(vi, tmp_image_ds_loc) [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] vm_util.copy_virtual_disk( [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] session._wait_for_task(vmdk_copy_task) [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] return self.wait_for_task(task_ref) [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] return evt.wait() [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] result = hub.switch() [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 673.421009] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] return self.greenlet.switch() [ 673.421332] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 673.421332] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self.f(*self.args, **self.kw) [ 673.421332] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 673.421332] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] raise exceptions.translate_fault(task_info.error) [ 673.421332] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 673.421332] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Faults: ['InvalidArgument'] [ 673.421332] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] [ 673.421332] env[69027]: INFO nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Terminating instance [ 673.422605] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.422811] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.423056] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adc7f62f-0d53-4d72-8e58-6e9c3a3991b4 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.426237] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 673.426467] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 673.427219] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bbb8ac-4806-4a40-b8fd-955bcc19c749 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.431254] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.431429] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 673.432428] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e1ed24d-8c0e-482e-b795-d04b98206718 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.436361] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 673.436831] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ddaaddb-1bcc-4914-85cc-7de02f9de78f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.439385] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Waiting for the task: (returnval){ [ 673.439385] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]520d4e10-380d-287f-a565-06d09d600bf0" [ 673.439385] env[69027]: _type = "Task" [ 673.439385] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.447322] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]520d4e10-380d-287f-a565-06d09d600bf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.514119] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 673.514119] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 673.514119] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Deleting the datastore file [datastore2] 657170ea-02c4-4bc9-97d0-9aa3960fcaf9 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.514119] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81774373-3880-4f38-b83c-d71b582f9381 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.519994] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Waiting for the task: (returnval){ [ 673.519994] env[69027]: value = "task-3395097" [ 673.519994] env[69027]: _type = "Task" [ 673.519994] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.527659] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Task: {'id': task-3395097, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.952860] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 673.952860] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Creating directory with path [datastore2] vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.952860] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bf39d0c-5898-4c05-930a-29f5c60489fb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.963704] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Created directory with path [datastore2] vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.963899] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Fetch image to [datastore2] vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 673.964267] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 673.964850] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca7e677-d74d-4edd-953c-d7cd12123fbb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.972301] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d760c8a-e630-412b-a4cc-856f9d3d41e4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.981573] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2659ea7b-9eb0-4aff-a89c-31e41ce45ce3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.012135] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babbaec4-ba37-4ba7-8f3a-35f9aa0d6dce {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.017893] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f0539bb9-3721-4006-a916-fa106b3fb609 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.028163] env[69027]: DEBUG oslo_vmware.api [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Task: {'id': task-3395097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065602} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.028502] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 674.028699] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 674.028873] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 674.029108] env[69027]: INFO nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Took 0.60 seconds to destroy the instance on the hypervisor. 
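[editor's note] The sequence above (CopyVirtualDisk_Task invoked, polled at "progress is 0%", then failing with InvalidArgument, followed by UnregisterVM and DeleteDatastoreFile_Task completing) follows the oslo.vmware task-polling pattern: a *_Task method is invoked, a looping call polls the task info, and an error state is turned into a raised, translated fault. The sketch below is a minimal stand-in for that control flow, not the oslo_vmware implementation; the helper name fetch_task_info, the dict-shaped task states, and the simulated fault are illustrative assumptions only.

    # Minimal sketch of the wait_for_task -> _poll_task -> raise-fault pattern
    # seen in the trace above. NOT oslo_vmware code; names and task-info shape
    # are assumptions for illustration.
    import time


    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list


    def wait_for_task(fetch_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it succeeds or raises its fault."""
        while True:
            info = fetch_task_info()          # e.g. TaskInfo from the SDK
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # Mirrors _poll_task raising exceptions.translate_fault(...)
                raise VimFaultException(info["faults"], info["message"])
            time.sleep(poll_interval)         # source of the 'progress is 0%.' entries


    if __name__ == "__main__":
        # Simulate the failed CopyVirtualDisk_Task: one 'running' poll, then an error.
        states = iter([
            {"state": "running"},
            {"state": "error", "faults": ["InvalidArgument"],
             "message": "A specified parameter was not correct: fileType"},
        ])
        try:
            wait_for_task(lambda: next(states), poll_interval=0.01)
        except VimFaultException as exc:
            print("task failed:", exc, exc.fault_list)

On the error path the compute manager then aborts the resource claim and tears the half-built instance down, which is exactly what the surrounding entries for instance 657170ea-02c4-4bc9-97d0-9aa3960fcaf9 record.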
[ 674.031255] env[69027]: DEBUG nova.compute.claims [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 674.031429] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.031662] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.038677] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 674.099424] env[69027]: DEBUG oslo_vmware.rw_handles [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 674.160600] env[69027]: DEBUG oslo_vmware.rw_handles [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 674.160833] env[69027]: DEBUG oslo_vmware.rw_handles [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 674.549909] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f785b9-3a94-4bac-b761-331fcda61bc7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.557311] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ae8588-5a8a-4d6c-85a6-5cc43512a1e8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.586842] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3aba3d-eb90-4482-a122-efdb14acd012 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.593948] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d56bf9-947e-40be-8ba3-4f2c992d97f4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.606796] env[69027]: DEBUG nova.compute.provider_tree [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.615432] env[69027]: DEBUG nova.scheduler.client.report [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 674.629210] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.597s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.629755] env[69027]: ERROR nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 674.629755] env[69027]: Faults: ['InvalidArgument'] [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Traceback (most recent call last): [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 674.629755] 
env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self.driver.spawn(context, instance, image_meta, [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self._fetch_image_if_missing(context, vi) [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] image_cache(vi, tmp_image_ds_loc) [ 674.629755] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] vm_util.copy_virtual_disk( [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] session._wait_for_task(vmdk_copy_task) [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] return self.wait_for_task(task_ref) [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] return evt.wait() [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] result = hub.switch() [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] return self.greenlet.switch() [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 674.630075] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] self.f(*self.args, **self.kw) [ 674.630318] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 674.630318] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] raise exceptions.translate_fault(task_info.error) [ 674.630318] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 674.630318] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Faults: ['InvalidArgument'] [ 674.630318] env[69027]: ERROR nova.compute.manager [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] [ 674.630591] env[69027]: DEBUG nova.compute.utils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 674.632240] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Build of instance 657170ea-02c4-4bc9-97d0-9aa3960fcaf9 was re-scheduled: A specified parameter was not correct: fileType [ 674.632240] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 674.632614] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 674.632788] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 674.632946] env[69027]: DEBUG nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 674.633126] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 675.015801] env[69027]: DEBUG nova.network.neutron [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.031621] env[69027]: INFO nova.compute.manager [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 657170ea-02c4-4bc9-97d0-9aa3960fcaf9] Took 0.40 seconds to deallocate network for instance. [ 675.143219] env[69027]: INFO nova.scheduler.client.report [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Deleted allocations for instance 657170ea-02c4-4bc9-97d0-9aa3960fcaf9 [ 675.173779] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2849fc38-3854-4b42-b1e9-970e5a96cd1d tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "657170ea-02c4-4bc9-97d0-9aa3960fcaf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.300s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.199132] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 675.253560] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.253825] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.255353] env[69027]: INFO nova.compute.claims [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.703401] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7a65f7-efa7-4694-9c2c-a14a10ab5a79 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.712181] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9991b5f-011d-49d5-b164-18384ba8aacd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.741863] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389da68c-5ed6-41a8-bc96-91fb9464a908 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.749077] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f7670e-527a-49bb-92de-9d99f48e9926 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.762168] env[69027]: DEBUG nova.compute.provider_tree [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.770676] env[69027]: DEBUG nova.scheduler.client.report [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 675.786544] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.533s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.787072] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 675.822665] env[69027]: DEBUG nova.compute.utils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 675.824557] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 675.824557] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 675.833394] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 675.897434] env[69027]: DEBUG nova.policy [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6ed005a4359c41209b43f043c98ad0ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a319a0627bf40138abe3c293c111dbb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 675.917584] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Start spawning the instance on the hypervisor. 
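
A small reading aid for the inventory payload repeated in the scheduler report entries above: placement derives usable capacity per resource class as (total - reserved) * allocation_ratio. The snippet below recomputes that from the exact values in this log; it is plain Python with no OpenStack imports and is purely illustrative.

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# Prints: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
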
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 676.058501] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 676.058871] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 676.058871] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.059013] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 676.059184] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.059334] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 676.059539] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 676.059705] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 676.059862] env[69027]: DEBUG nova.virt.hardware [None 
req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 676.060088] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 676.060345] env[69027]: DEBUG nova.virt.hardware [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.061226] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5308ed4-990e-4abe-b67e-d4b7063134d5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.069757] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f35148e-f3cc-469a-8483-9716d10271b1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.322656] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Successfully created port: f4f01f8c-bf27-4330-81d6-4cf650366710 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 676.481871] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0f498c2f-adc7-4a9c-b8df-3d6e432363a9 tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Acquiring lock "695f7335-8e1d-4d7f-a377-8666b12e30f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.481871] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0f498c2f-adc7-4a9c-b8df-3d6e432363a9 tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "695f7335-8e1d-4d7f-a377-8666b12e30f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.203978] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Successfully updated port: f4f01f8c-bf27-4330-81d6-4cf650366710 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 677.219235] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "refresh_cache-b6a38a84-0b95-494c-a423-3360824ed8d3" {{(pid=69027) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.219394] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquired lock "refresh_cache-b6a38a84-0b95-494c-a423-3360824ed8d3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.219546] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 677.288747] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 677.382516] env[69027]: DEBUG nova.compute.manager [req-7536a5bc-7bd6-4a91-a657-f4d79ef304b3 req-0acd4573-2c00-4645-bf51-a91027bd713d service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Received event network-vif-plugged-f4f01f8c-bf27-4330-81d6-4cf650366710 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 677.382731] env[69027]: DEBUG oslo_concurrency.lockutils [req-7536a5bc-7bd6-4a91-a657-f4d79ef304b3 req-0acd4573-2c00-4645-bf51-a91027bd713d service nova] Acquiring lock "b6a38a84-0b95-494c-a423-3360824ed8d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.382935] env[69027]: DEBUG oslo_concurrency.lockutils [req-7536a5bc-7bd6-4a91-a657-f4d79ef304b3 req-0acd4573-2c00-4645-bf51-a91027bd713d service nova] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.383132] env[69027]: DEBUG oslo_concurrency.lockutils [req-7536a5bc-7bd6-4a91-a657-f4d79ef304b3 req-0acd4573-2c00-4645-bf51-a91027bd713d service nova] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.383304] env[69027]: DEBUG nova.compute.manager [req-7536a5bc-7bd6-4a91-a657-f4d79ef304b3 req-0acd4573-2c00-4645-bf51-a91027bd713d service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] No waiting events found dispatching network-vif-plugged-f4f01f8c-bf27-4330-81d6-4cf650366710 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 677.383468] env[69027]: WARNING nova.compute.manager [req-7536a5bc-7bd6-4a91-a657-f4d79ef304b3 req-0acd4573-2c00-4645-bf51-a91027bd713d service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Received unexpected event network-vif-plugged-f4f01f8c-bf27-4330-81d6-4cf650366710 for instance with vm_state building and task_state spawning. 
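
The acquire/release pairs that dominate these entries (the shared "compute_resources" lock, the per-instance "<uuid>-events" lock taken around external event dispatch) follow the standard oslo.concurrency pattern. A minimal sketch, assuming only that oslo.concurrency is installed; the function bodies are placeholders, not Nova code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    # Runs with the process-local "compute_resources" semaphore held, the same
    # serialization the ResourceTracker entries above rely on.
    return {'instance': instance_uuid, 'claimed': True}

def pop_event(instance_uuid, event_name):
    # Explicit context-manager form, analogous to the per-instance
    # "<uuid>-events" lock acquired and released around event handling.
    with lockutils.lock(f'{instance_uuid}-events'):
        return event_name

claim('b6a38a84-0b95-494c-a423-3360824ed8d3')
pop_event('b6a38a84-0b95-494c-a423-3360824ed8d3',
          'network-vif-plugged-f4f01f8c-bf27-4330-81d6-4cf650366710')
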
[ 677.518468] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Updating instance_info_cache with network_info: [{"id": "f4f01f8c-bf27-4330-81d6-4cf650366710", "address": "fa:16:3e:b9:5f:34", "network": {"id": "d922564e-f0b6-4239-af10-fe21a763ae8c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-569805545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a319a0627bf40138abe3c293c111dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4f01f8c-bf", "ovs_interfaceid": "f4f01f8c-bf27-4330-81d6-4cf650366710", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.531946] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Releasing lock "refresh_cache-b6a38a84-0b95-494c-a423-3360824ed8d3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.532456] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Instance network_info: |[{"id": "f4f01f8c-bf27-4330-81d6-4cf650366710", "address": "fa:16:3e:b9:5f:34", "network": {"id": "d922564e-f0b6-4239-af10-fe21a763ae8c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-569805545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a319a0627bf40138abe3c293c111dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4f01f8c-bf", "ovs_interfaceid": "f4f01f8c-bf27-4330-81d6-4cf650366710", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 677.533149] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:5f:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4f01f8c-bf27-4330-81d6-4cf650366710', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 677.540660] env[69027]: DEBUG oslo.service.loopingcall [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 677.541490] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 677.541721] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11347f44-bba2-41d4-944d-2fec9bf96ec5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.561245] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 677.561245] env[69027]: value = "task-3395098" [ 677.561245] env[69027]: _type = "Task" [ 677.561245] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.569127] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395098, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.071431] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395098, 'name': CreateVM_Task, 'duration_secs': 0.320913} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.071612] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 678.072300] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 678.072470] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.072793] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 678.073059] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08345cfa-dc8c-4b00-85c5-0798172194ce {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.077701] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 678.077701] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52068933-f397-392d-f75e-f0db00752296" [ 678.077701] env[69027]: _type = "Task" [ 678.077701] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.086694] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52068933-f397-392d-f75e-f0db00752296, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.587776] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 678.588072] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 678.588310] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.409242] env[69027]: DEBUG nova.compute.manager [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Received event network-changed-f4f01f8c-bf27-4330-81d6-4cf650366710 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 679.409436] env[69027]: DEBUG nova.compute.manager [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Refreshing instance network info cache due to event network-changed-f4f01f8c-bf27-4330-81d6-4cf650366710. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 679.409612] env[69027]: DEBUG oslo_concurrency.lockutils [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] Acquiring lock "refresh_cache-b6a38a84-0b95-494c-a423-3360824ed8d3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.409726] env[69027]: DEBUG oslo_concurrency.lockutils [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] Acquired lock "refresh_cache-b6a38a84-0b95-494c-a423-3360824ed8d3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.409890] env[69027]: DEBUG nova.network.neutron [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Refreshing network info cache for port f4f01f8c-bf27-4330-81d6-4cf650366710 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 679.945889] env[69027]: DEBUG nova.network.neutron [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Updated VIF entry in instance network info cache for port f4f01f8c-bf27-4330-81d6-4cf650366710. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 679.946671] env[69027]: DEBUG nova.network.neutron [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Updating instance_info_cache with network_info: [{"id": "f4f01f8c-bf27-4330-81d6-4cf650366710", "address": "fa:16:3e:b9:5f:34", "network": {"id": "d922564e-f0b6-4239-af10-fe21a763ae8c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-569805545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a319a0627bf40138abe3c293c111dbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4f01f8c-bf", "ovs_interfaceid": "f4f01f8c-bf27-4330-81d6-4cf650366710", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.958107] env[69027]: DEBUG oslo_concurrency.lockutils [req-a119aaa8-44a8-4637-89be-e5d25201572c req-e65279c6-03e4-44fa-9f58-fd693b9b099f service nova] Releasing lock "refresh_cache-b6a38a84-0b95-494c-a423-3360824ed8d3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.771397] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 707.771742] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 707.771806] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 707.783963] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.784173] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.784344] env[69027]: DEBUG 
oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.784560] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 707.785784] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610f2886-0f53-45a9-8dfb-2dcef90bdfc1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.794478] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc00e732-d55b-4f18-9055-3b25e8ae49fe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.808308] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa84bb5-29a9-4a89-8061-4f2046dc3236 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.814841] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5651ff-6300-4297-bd30-ce5ffed119a1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.849695] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180980MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 707.849695] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.849695] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.922099] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a26389e3-7c20-4227-bd57-9d058964edc5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.922267] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 2e16f12d-194a-47a5-824d-062a684a86f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.922650] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ad409c8-465f-4106-946a-7f401358d5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.922650] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ae5e2ca1-75e2-4023-b297-4cc265f038e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.922650] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ff1dac6-b328-42c3-babe-86aef27466c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.922838] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance aec054f1-0d52-49be-9dee-8db0ae362f12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.922838] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.922974] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.923085] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.923202] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 707.938451] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.949332] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.959927] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a00267a5-aa1c-434f-8201-157481bc0801 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.970272] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.980513] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 45dfaf74-5be6-4c63-9efc-4717d12e3d2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 707.991327] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b2b068c1-defd-4ee8-8611-9016924fc223 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.001504] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 40937830-3e23-49ff-aeb6-cdd6b62b0614 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.011854] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c2497714-9a08-4ab0-9371-33060724f9d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.021904] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7d2205d5-ed82-49cc-960b-b9da9584144a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.031365] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ded28a13-a8c2-45aa-978e-2a1de389d958 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.041182] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance db769146-b610-4d0a-8329-b977d8450a27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.051021] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 0270a5ec-007f-4f7a-a6c6-05163c052452 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.061256] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 0522ed67-caaf-4018-b35f-252a6bbd2644 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.071215] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d14a7134-97a4-47e1-a49c-84e0189ecfec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.081960] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d2ae8743-b98e-403e-acc3-1be0eda5825b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.092290] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 887fbbeb-c981-4cc3-94e6-c232774507c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.101954] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.112851] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 918f38e1-e1b4-40d7-a79c-49257a814941 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.122367] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 5bdb4d11-9125-4c28-89b0-8fd9147e0c0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.131174] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4927121e-a71e-47e9-9475-603096e82492 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.140655] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.149726] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbc37887-513b-4b67-915f-e6862ea585e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.159162] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c42ce6eb-a29d-4e16-b5e6-ee507bd58819 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.168462] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 59c731c3-8604-481b-b761-29a5251411f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.177888] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance caee2e65-98e5-424d-8dd8-057732b921fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.186930] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 695f7335-8e1d-4d7f-a377-8666b12e30f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 708.187202] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 708.187353] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 708.574714] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7cd650-c43f-4ba1-abda-e1db414471a9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.582320] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fba1fd-0409-4310-ad86-9b21a1bfa4de {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.611833] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d0443a-501b-4c1a-88cb-6ba3b06e9385 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.619019] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b954d5a-9a63-4609-9a83-8c9478785229 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.632177] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.641058] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 708.656320] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 708.656779] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.809s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.656747] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.657013] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 709.771195] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.771313] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 709.771437] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 709.793165] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.793361] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.793475] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.793603] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.793729] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.793851] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.793971] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.794105] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.794227] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.794342] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 709.794508] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 709.794932] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 709.795119] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 710.771053] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 711.772235] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 722.912143] env[69027]: WARNING oslo_vmware.rw_handles [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 722.912143] 
env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 722.912143] env[69027]: ERROR oslo_vmware.rw_handles [ 722.912935] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 722.914386] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 722.914731] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Copying Virtual Disk [datastore2] vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/93d73816-b770-4365-8d48-ad02ee20f1eb/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 722.914986] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37162c67-dd1e-425c-ac1b-558b4f687244 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.923973] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Waiting for the task: (returnval){ [ 722.923973] env[69027]: value = "task-3395099" [ 722.923973] env[69027]: _type = "Task" [ 722.923973] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.933098] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Task: {'id': task-3395099, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.436980] env[69027]: DEBUG oslo_vmware.exceptions [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 723.436980] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.436980] env[69027]: ERROR nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 723.436980] env[69027]: Faults: ['InvalidArgument'] [ 723.436980] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Traceback (most recent call last): [ 723.436980] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 723.436980] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] yield resources [ 723.436980] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 723.436980] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] self.driver.spawn(context, instance, image_meta, [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] self._fetch_image_if_missing(context, vi) [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] image_cache(vi, tmp_image_ds_loc) [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] vm_util.copy_virtual_disk( [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] session._wait_for_task(vmdk_copy_task) [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] return self.wait_for_task(task_ref) [ 723.437286] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] return evt.wait() [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] result = hub.switch() [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] return self.greenlet.switch() [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] self.f(*self.args, **self.kw) [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] raise exceptions.translate_fault(task_info.error) [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Faults: ['InvalidArgument'] [ 723.437630] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] [ 723.437928] env[69027]: INFO nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Terminating instance [ 723.438127] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.438344] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.439155] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d282b680-3f02-40eb-b91f-5fc1f7de76b3 {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.440849] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 723.441055] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 723.441777] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61804b2-8e5f-4717-a929-ab0b21ce4eae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.448518] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 723.448734] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7adc337-e5d8-4f85-bcd3-87c8c68d7705 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.450945] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.451136] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 723.452385] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d889dc0a-f8fe-40fd-84b4-2f9dcd956c3b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.457136] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Waiting for the task: (returnval){ [ 723.457136] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52abd66f-bab1-abf3-c80b-744962cc3175" [ 723.457136] env[69027]: _type = "Task" [ 723.457136] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.463883] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52abd66f-bab1-abf3-c80b-744962cc3175, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.524699] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 723.524947] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 723.525144] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Deleting the datastore file [datastore2] a26389e3-7c20-4227-bd57-9d058964edc5 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.525415] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2419d8a7-ef59-4090-bfb3-7275ef0fc894 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.531537] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Waiting for the task: (returnval){ [ 723.531537] env[69027]: value = "task-3395101" [ 723.531537] env[69027]: _type = "Task" [ 723.531537] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.538952] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Task: {'id': task-3395101, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.967181] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 723.967450] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Creating directory with path [datastore2] vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.967671] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34cbd52e-440e-4a0d-b43d-2bd2af3e22dc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.979358] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Created directory with path [datastore2] vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.979603] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Fetch image to [datastore2] vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 723.979838] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 723.980583] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a5458e-2480-4ab7-ad16-4ae72ebf9cfd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.987305] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8c1c71-4103-4121-8172-33efa4f0cd11 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.996678] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624b1682-0864-4666-8066-e914cf2d17ad {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.029016] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9035ac-376c-4372-8c6c-1fbc94567ca9 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.035943] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f660d308-11ec-450f-b7b5-1732a789396a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.044018] env[69027]: DEBUG oslo_vmware.api [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Task: {'id': task-3395101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074053} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.044018] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 724.044018] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 724.044018] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 724.044018] env[69027]: INFO nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Took 0.60 seconds to destroy the instance on the hypervisor. 
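The spawn failure and cleanup above surface through oslo.vmware's task polling: wait_for_task() re-raises the server-side fault of CopyVirtualDisk_Task as VimFaultException, with Faults: ['InvalidArgument'] and a message naming the bad fileType parameter. The following is a minimal, purely illustrative Python sketch of that calling pattern, not code taken from Nova or from this log; `session` is assumed to be a VMwareAPISession-like object exposing wait_for_task() as referenced in the traceback, and start_copy_task is a hypothetical stand-in for whatever submits the copy task.

    from oslo_vmware import exceptions as vexc

    def copy_with_fault_check(session, start_copy_task):
        """Run a vCenter copy task and distinguish an InvalidArgument fault."""
        task_ref = start_copy_task()  # hypothetical helper returning a task moref
        try:
            # Blocks until vCenter reports the task finished, as in the log above.
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            # wait_for_task() re-raises the task's fault; in the run above the
            # fault list was ['InvalidArgument'] for the copy spec's fileType.
            faults = getattr(exc, 'fault_list', None) or []
            if 'InvalidArgument' in faults:
                # Treat a rejected copy spec differently from transient errors.
                raise ValueError('vCenter rejected the disk copy spec: %s' % exc)
            raise

In the sequence logged above, Nova reacts to exactly this fault by unregistering the VM, deleting its datastore files, aborting the resource claim, and rescheduling the build.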
[ 724.046186] env[69027]: DEBUG nova.compute.claims [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 724.046513] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.047620] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.057888] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 724.110130] env[69027]: DEBUG oslo_vmware.rw_handles [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 724.172481] env[69027]: DEBUG oslo_vmware.rw_handles [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 724.172653] env[69027]: DEBUG oslo_vmware.rw_handles [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 724.552025] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ecbbfa9-67ed-4faf-97f6-5e339de2af5c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.559472] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340b7b08-096c-4fd2-af9b-f376a3be9d37 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.588706] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba389e9-9d4f-4eeb-9584-837c3765b30c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.595912] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba26a93-da8f-4e18-b3b9-1df9e3afb947 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.608946] env[69027]: DEBUG nova.compute.provider_tree [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.617903] env[69027]: DEBUG nova.scheduler.client.report [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 724.632867] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.585s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.632867] env[69027]: ERROR nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 724.632867] env[69027]: Faults: ['InvalidArgument'] [ 724.632867] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Traceback (most recent call last): [ 724.632867] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 724.632867] env[69027]: ERROR nova.compute.manager 
[instance: a26389e3-7c20-4227-bd57-9d058964edc5] self.driver.spawn(context, instance, image_meta, [ 724.632867] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 724.632867] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.632867] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 724.632867] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] self._fetch_image_if_missing(context, vi) [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] image_cache(vi, tmp_image_ds_loc) [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] vm_util.copy_virtual_disk( [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] session._wait_for_task(vmdk_copy_task) [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] return self.wait_for_task(task_ref) [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] return evt.wait() [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] result = hub.switch() [ 724.633159] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] return self.greenlet.switch() [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] self.f(*self.args, **self.kw) [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] raise exceptions.translate_fault(task_info.error) [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Faults: ['InvalidArgument'] [ 724.633453] env[69027]: ERROR nova.compute.manager [instance: a26389e3-7c20-4227-bd57-9d058964edc5] [ 724.633453] env[69027]: DEBUG nova.compute.utils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 724.636979] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Build of instance a26389e3-7c20-4227-bd57-9d058964edc5 was re-scheduled: A specified parameter was not correct: fileType [ 724.636979] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 724.637381] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 724.637559] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 724.637720] env[69027]: DEBUG nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 724.637886] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.992583] env[69027]: DEBUG nova.network.neutron [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.003678] env[69027]: INFO nova.compute.manager [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] [instance: a26389e3-7c20-4227-bd57-9d058964edc5] Took 0.37 seconds to deallocate network for instance. [ 725.095553] env[69027]: INFO nova.scheduler.client.report [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Deleted allocations for instance a26389e3-7c20-4227-bd57-9d058964edc5 [ 725.115088] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7accbd73-b9a0-42f5-a672-47c001ce0e8f tempest-ServerDiagnosticsTest-343328441 tempest-ServerDiagnosticsTest-343328441-project-member] Lock "a26389e3-7c20-4227-bd57-9d058964edc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.029s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.128752] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 725.180024] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.180205] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.181681] env[69027]: INFO nova.compute.claims [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.605207] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4575067c-9448-49fe-9469-333052a7102b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.613051] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e523c74d-e09f-4af1-85a4-0313f0205f82 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.643428] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60910ca-ed4d-4e63-9ac2-36f5e9867f6c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.651859] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe9eaf3-44a9-4a1c-a022-10bb40c1d89e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.667152] env[69027]: DEBUG nova.compute.provider_tree [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.676428] env[69027]: DEBUG nova.scheduler.client.report [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 725.708658] env[69027]: DEBUG oslo_concurrency.lockutils 
[None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.528s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.709349] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 725.742697] env[69027]: DEBUG nova.compute.utils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 725.743970] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 725.744142] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 725.753079] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 725.816640] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 725.833411] env[69027]: DEBUG nova.policy [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd31e55f937a84ec1a8868ccd1cfc6c6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f9dec99d89ef4ee3a43b5242fced36a8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 725.842963] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=192,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 725.843222] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 725.843384] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.843565] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 725.843717] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.843861] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 725.844079] 
env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 725.844244] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 725.844409] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 725.844569] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 725.844740] env[69027]: DEBUG nova.virt.hardware [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 725.845606] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7e34bd-4746-4e87-8c8b-82fa1fe2db25 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.855969] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccd1e4c-76e3-4681-939f-724d23d67fad {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.209089] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Successfully created port: 7f203d32-9d1b-45d8-89cd-b7d6ac357411 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.248385] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Successfully updated port: 7f203d32-9d1b-45d8-89cd-b7d6ac357411 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 727.259464] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "refresh_cache-d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.260812] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "refresh_cache-d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.260812] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 727.322786] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 727.492724] env[69027]: DEBUG nova.compute.manager [req-80039fce-4741-4df0-b273-d1c7f3ceb2c6 req-05fbefb5-df0f-495b-9284-d24f939f0b06 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Received event network-vif-plugged-7f203d32-9d1b-45d8-89cd-b7d6ac357411 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 727.492947] env[69027]: DEBUG oslo_concurrency.lockutils [req-80039fce-4741-4df0-b273-d1c7f3ceb2c6 req-05fbefb5-df0f-495b-9284-d24f939f0b06 service nova] Acquiring lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.493175] env[69027]: DEBUG oslo_concurrency.lockutils [req-80039fce-4741-4df0-b273-d1c7f3ceb2c6 req-05fbefb5-df0f-495b-9284-d24f939f0b06 service nova] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.493417] env[69027]: DEBUG oslo_concurrency.lockutils [req-80039fce-4741-4df0-b273-d1c7f3ceb2c6 req-05fbefb5-df0f-495b-9284-d24f939f0b06 service nova] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.493506] env[69027]: DEBUG nova.compute.manager [req-80039fce-4741-4df0-b273-d1c7f3ceb2c6 req-05fbefb5-df0f-495b-9284-d24f939f0b06 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] No waiting events found dispatching network-vif-plugged-7f203d32-9d1b-45d8-89cd-b7d6ac357411 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 727.493803] env[69027]: WARNING nova.compute.manager [req-80039fce-4741-4df0-b273-d1c7f3ceb2c6 req-05fbefb5-df0f-495b-9284-d24f939f0b06 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Received unexpected event network-vif-plugged-7f203d32-9d1b-45d8-89cd-b7d6ac357411 for instance with vm_state building and task_state spawning. 
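[editor's note] The nova.virt.hardware records above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies") describe enumerating CPU topologies for the 1-vCPU m1.micro flavor under the logged 65536/65536/65536 limits. The following is a minimal illustrative sketch of that idea only; it is not Nova's _get_possible_cpu_topologies code, and the names here (CpuTopology, possible_topologies) are invented for the example.

# Illustrative sketch only -- NOT Nova's nova.virt.hardware implementation.
# Enumerate (sockets, cores, threads) triples whose product equals the flavor's
# vCPU count, subject to per-dimension maximums like those logged above.
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class CpuTopology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield every topology whose sockets * cores * threads == vcpus."""
    for sockets, cores, threads in product(
        range(1, min(vcpus, max_sockets) + 1),
        range(1, min(vcpus, max_cores) + 1),
        range(1, min(vcpus, max_threads) + 1),
    ):
        if sockets * cores * threads == vcpus:
            yield CpuTopology(sockets, cores, threads)


if __name__ == "__main__":
    # For a 1-vCPU flavor, only 1:1:1 is possible, matching the single
    # VirtCPUTopology(cores=1,sockets=1,threads=1) reported in the log.
    print(list(possible_topologies(1, 65536, 65536, 65536)))

This is why the log reports exactly one possible topology before proceeding to port creation.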
[ 727.579661] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Updating instance_info_cache with network_info: [{"id": "7f203d32-9d1b-45d8-89cd-b7d6ac357411", "address": "fa:16:3e:f9:1a:97", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f203d32-9d", "ovs_interfaceid": "7f203d32-9d1b-45d8-89cd-b7d6ac357411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.596325] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "refresh_cache-d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.596556] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Instance network_info: |[{"id": "7f203d32-9d1b-45d8-89cd-b7d6ac357411", "address": "fa:16:3e:f9:1a:97", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f203d32-9d", "ovs_interfaceid": "7f203d32-9d1b-45d8-89cd-b7d6ac357411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 727.599095] env[69027]: DEBUG 
nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:1a:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4a2b284a-a29c-478f-b763-c9b5821e20ec', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f203d32-9d1b-45d8-89cd-b7d6ac357411', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.604783] env[69027]: DEBUG oslo.service.loopingcall [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.605305] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 727.605523] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9eed00c-a3d2-4291-8904-8b624af0ee2b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.626679] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.626679] env[69027]: value = "task-3395102" [ 727.626679] env[69027]: _type = "Task" [ 727.626679] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.635228] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395102, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.137387] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395102, 'name': CreateVM_Task, 'duration_secs': 0.283138} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.137598] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 728.138383] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.138696] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.139013] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 728.139299] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90e0bcfb-d565-43ee-9ecf-39aeb262a66a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.144069] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 728.144069] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5291a1a6-04e2-3eb4-8e33-d839961719d5" [ 728.144069] env[69027]: _type = "Task" [ 728.144069] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.151982] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5291a1a6-04e2-3eb4-8e33-d839961719d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.656149] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.656620] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.656620] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.556277] env[69027]: DEBUG nova.compute.manager [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Received event network-changed-7f203d32-9d1b-45d8-89cd-b7d6ac357411 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 729.556561] env[69027]: DEBUG nova.compute.manager [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Refreshing instance network info cache due to event network-changed-7f203d32-9d1b-45d8-89cd-b7d6ac357411. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 729.556785] env[69027]: DEBUG oslo_concurrency.lockutils [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] Acquiring lock "refresh_cache-d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 729.556929] env[69027]: DEBUG oslo_concurrency.lockutils [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] Acquired lock "refresh_cache-d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.558242] env[69027]: DEBUG nova.network.neutron [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Refreshing network info cache for port 7f203d32-9d1b-45d8-89cd-b7d6ac357411 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 729.936709] env[69027]: DEBUG nova.network.neutron [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Updated VIF entry in instance network info cache for port 7f203d32-9d1b-45d8-89cd-b7d6ac357411. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 729.937096] env[69027]: DEBUG nova.network.neutron [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Updating instance_info_cache with network_info: [{"id": "7f203d32-9d1b-45d8-89cd-b7d6ac357411", "address": "fa:16:3e:f9:1a:97", "network": {"id": "53d6d286-8793-449d-b16d-e57e8ae5ac67", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1420004536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f9dec99d89ef4ee3a43b5242fced36a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4a2b284a-a29c-478f-b763-c9b5821e20ec", "external-id": "nsx-vlan-transportzone-691", "segmentation_id": 691, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f203d32-9d", "ovs_interfaceid": "7f203d32-9d1b-45d8-89cd-b7d6ac357411", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.951148] env[69027]: DEBUG oslo_concurrency.lockutils [req-62fb79ba-df38-4feb-952a-b22750a49abf req-bc3a761c-6f90-4902-9248-9c6524a0a815 service nova] Releasing lock "refresh_cache-d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.103153] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "4ed95b65-233e-406e-8d27-2a5cd2694184" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.103461] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.490684] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.490932] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 
tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.766339] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 767.771009] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 767.784935] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.785163] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.785345] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.785506] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 767.786955] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e89faf9-ff86-4934-8a6f-3fff7426b191 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.795267] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c5a226-01e8-4799-b09f-b5bb9d1a0634 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.808662] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce5e115-3846-4e2d-b9fe-8d9531ee6890 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.814814] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c622f46-cf49-4b09-9fa0-aef4d67780ae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.844380] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180984MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 767.844540] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.844731] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.919686] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 2e16f12d-194a-47a5-824d-062a684a86f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.919865] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ad409c8-465f-4106-946a-7f401358d5a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.919984] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ae5e2ca1-75e2-4023-b297-4cc265f038e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.920125] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ff1dac6-b328-42c3-babe-86aef27466c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.920244] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance aec054f1-0d52-49be-9dee-8db0ae362f12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.920361] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.920478] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.920593] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.920709] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.920820] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 767.932857] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.943833] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a00267a5-aa1c-434f-8201-157481bc0801 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.953627] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.963470] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 45dfaf74-5be6-4c63-9efc-4717d12e3d2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.975530] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b2b068c1-defd-4ee8-8611-9016924fc223 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.987183] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 40937830-3e23-49ff-aeb6-cdd6b62b0614 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.996016] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c2497714-9a08-4ab0-9371-33060724f9d6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.007417] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7d2205d5-ed82-49cc-960b-b9da9584144a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.018648] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ded28a13-a8c2-45aa-978e-2a1de389d958 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.028263] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance db769146-b610-4d0a-8329-b977d8450a27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.037631] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 0270a5ec-007f-4f7a-a6c6-05163c052452 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.047729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 0522ed67-caaf-4018-b35f-252a6bbd2644 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.057442] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d14a7134-97a4-47e1-a49c-84e0189ecfec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.067442] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d2ae8743-b98e-403e-acc3-1be0eda5825b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.079366] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 887fbbeb-c981-4cc3-94e6-c232774507c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.090158] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.100913] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 918f38e1-e1b4-40d7-a79c-49257a814941 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.111590] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 5bdb4d11-9125-4c28-89b0-8fd9147e0c0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.122011] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4927121e-a71e-47e9-9475-603096e82492 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.132497] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.144219] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbc37887-513b-4b67-915f-e6862ea585e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.155981] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c42ce6eb-a29d-4e16-b5e6-ee507bd58819 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.165928] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 59c731c3-8604-481b-b761-29a5251411f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.179901] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance caee2e65-98e5-424d-8dd8-057732b921fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.188857] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 695f7335-8e1d-4d7f-a377-8666b12e30f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.198960] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.208572] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.208872] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 768.209052] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 768.618009] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af545ef8-98cf-4b9d-a044-6dd4bf980df8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.626647] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f61a6e0-c4d0-4a95-bee6-39861a30a724 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.656232] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7625e5de-54bf-4f51-bc2d-9d9847fca30e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.664767] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2914f5-853a-4dab-8957-172c96e5ff59 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.676376] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.684852] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 768.698469] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 768.698649] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.854s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.699610] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 769.772845] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 769.772845] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 769.772845] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 769.792323] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.793750] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.793963] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.794155] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.794348] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.794476] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.794630] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.794781] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.794932] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.795094] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 769.795247] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 770.771423] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.771668] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.774028] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 770.774028] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 771.767764] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 771.788636] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.772620] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 773.925143] env[69027]: WARNING oslo_vmware.rw_handles [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 773.925143] env[69027]: ERROR oslo_vmware.rw_handles [ 773.925546] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 773.927334] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 773.927604] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 
tempest-ServerExternalEventsTest-87630704-project-member] Copying Virtual Disk [datastore2] vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/fc29c63e-2edb-41a6-8a6b-0604a5e89dc6/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 773.927904] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-870d0e8e-56f6-4922-a63d-974322e35855 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.935848] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Waiting for the task: (returnval){ [ 773.935848] env[69027]: value = "task-3395103" [ 773.935848] env[69027]: _type = "Task" [ 773.935848] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.943264] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Task: {'id': task-3395103, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.447052] env[69027]: DEBUG oslo_vmware.exceptions [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 774.447052] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.447272] env[69027]: ERROR nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 774.447272] env[69027]: Faults: ['InvalidArgument'] [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Traceback (most recent call last): [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] yield resources [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self.driver.spawn(context, instance, image_meta, [ 774.447272] env[69027]: ERROR 
nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self._fetch_image_if_missing(context, vi) [ 774.447272] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] image_cache(vi, tmp_image_ds_loc) [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] vm_util.copy_virtual_disk( [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] session._wait_for_task(vmdk_copy_task) [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] return self.wait_for_task(task_ref) [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] return evt.wait() [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] result = hub.switch() [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 774.447643] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] return self.greenlet.switch() [ 774.448038] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 774.448038] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self.f(*self.args, **self.kw) [ 774.448038] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 774.448038] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] 
raise exceptions.translate_fault(task_info.error) [ 774.448038] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 774.448038] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Faults: ['InvalidArgument'] [ 774.448038] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] [ 774.448038] env[69027]: INFO nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Terminating instance [ 774.449409] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.449409] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.449648] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78bcec67-5f28-4638-ac30-41783ab01bbe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.451974] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 774.452189] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 774.452899] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9293abff-0e77-446e-89ed-99aba1f2f665 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.460113] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 774.460341] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26c5df72-1190-46ec-aa11-15ec192e9644 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.462492] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.462668] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 774.463594] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae37161b-17f0-4fe8-acb6-a7f1603aa20d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.468243] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Waiting for the task: (returnval){ [ 774.468243] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]525151fa-39e8-5cbf-77b4-7c97e6eb3efb" [ 774.468243] env[69027]: _type = "Task" [ 774.468243] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.475291] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]525151fa-39e8-5cbf-77b4-7c97e6eb3efb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.531106] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 774.531302] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 774.531493] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Deleting the datastore file [datastore2] 2e16f12d-194a-47a5-824d-062a684a86f8 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.531762] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7f0d4d7-2372-4c1a-b305-98c9bd95d8b7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.537949] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Waiting for the task: (returnval){ [ 774.537949] env[69027]: value = "task-3395105" [ 774.537949] env[69027]: _type = "Task" [ 774.537949] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.546606] env[69027]: DEBUG oslo_vmware.api [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Task: {'id': task-3395105, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.978916] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 774.979248] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Creating directory with path [datastore2] vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.979442] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b58e059-9cb1-4902-ae1f-f14ca5732187 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.025082] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Created directory with path [datastore2] vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 775.025296] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Fetch image to [datastore2] vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 775.025481] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 775.026400] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dba288-d9a1-410a-b635-c1e58340422e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.033239] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ef4646-4542-457a-9c28-60e0dd306cd2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.044756] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a812d4b8-1c1e-468d-914c-0224a379a4a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.052377] env[69027]: DEBUG oslo_vmware.api [None 
req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Task: {'id': task-3395105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07969} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.077022] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.077238] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 775.077410] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 775.077588] env[69027]: INFO nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Took 0.63 seconds to destroy the instance on the hypervisor. [ 775.080032] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bcf47a-cc22-4e51-b595-fb9be5d8f886 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.082459] env[69027]: DEBUG nova.compute.claims [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 775.082635] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.082846] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.088671] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-53d2956f-f930-4f44-926d-eeb6555e9433 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.175415] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-619a4596-0ca5-4848-a189-b2588fd1c87a 
tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 775.230948] env[69027]: DEBUG oslo_vmware.rw_handles [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 775.295963] env[69027]: DEBUG oslo_vmware.rw_handles [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 775.295963] env[69027]: DEBUG oslo_vmware.rw_handles [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 775.599594] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559a525d-3a7c-45d1-91a1-db454e7791e8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.606832] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50db8f81-e2c1-4f3d-82b0-bbea1cdca711 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.636662] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3057f1-22c4-40d1-8fb0-e195b03b724b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.644052] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44023a2c-054c-4a77-9787-d648ac66c15c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.656520] env[69027]: DEBUG nova.compute.provider_tree [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.664675] env[69027]: DEBUG nova.scheduler.client.report [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Inventory has not 
changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 775.679898] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.597s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.680462] env[69027]: ERROR nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 775.680462] env[69027]: Faults: ['InvalidArgument'] [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Traceback (most recent call last): [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self.driver.spawn(context, instance, image_meta, [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self._fetch_image_if_missing(context, vi) [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] image_cache(vi, tmp_image_ds_loc) [ 775.680462] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] vm_util.copy_virtual_disk( [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] session._wait_for_task(vmdk_copy_task) [ 775.680829] env[69027]: ERROR nova.compute.manager 
[instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] return self.wait_for_task(task_ref) [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] return evt.wait() [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] result = hub.switch() [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] return self.greenlet.switch() [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 775.680829] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] self.f(*self.args, **self.kw) [ 775.681195] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 775.681195] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] raise exceptions.translate_fault(task_info.error) [ 775.681195] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 775.681195] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Faults: ['InvalidArgument'] [ 775.681195] env[69027]: ERROR nova.compute.manager [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] [ 775.681195] env[69027]: DEBUG nova.compute.utils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 775.682603] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Build of instance 2e16f12d-194a-47a5-824d-062a684a86f8 was re-scheduled: A specified parameter was not correct: fileType [ 775.682603] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 775.682977] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Unplugging VIFs for instance {{(pid=69027) 
_cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 775.683174] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 775.683333] env[69027]: DEBUG nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 775.683497] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 776.077872] env[69027]: DEBUG nova.network.neutron [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.091715] env[69027]: INFO nova.compute.manager [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] [instance: 2e16f12d-194a-47a5-824d-062a684a86f8] Took 0.41 seconds to deallocate network for instance. [ 776.190354] env[69027]: INFO nova.scheduler.client.report [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Deleted allocations for instance 2e16f12d-194a-47a5-824d-062a684a86f8 [ 776.213264] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eb23e0ef-65f1-4112-bc34-ffb0c2f11aa7 tempest-ServerExternalEventsTest-87630704 tempest-ServerExternalEventsTest-87630704-project-member] Lock "2e16f12d-194a-47a5-824d-062a684a86f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.885s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.232496] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 776.285161] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.285432] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 776.287005] env[69027]: INFO nova.compute.claims [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 776.746277] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5adce759-ccad-4619-9ece-26bb8de6a578 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.754190] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e341c5f7-1391-4b20-aa2f-966b3b4e95b9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.784771] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63239a3b-db39-4092-89a9-4d1b03bc9588 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.792853] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf68aa6d-9e2a-4949-9256-eddc62314737 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.809767] env[69027]: DEBUG nova.compute.provider_tree [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.819238] env[69027]: DEBUG nova.scheduler.client.report [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 776.834164] env[69027]: DEBUG oslo_concurrency.lockutils 
[None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.549s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 776.834669] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 776.870591] env[69027]: DEBUG nova.compute.utils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 776.872154] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 776.872314] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 776.880319] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 776.945909] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 776.948598] env[69027]: DEBUG nova.policy [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56037359ca2045c4aac48662dfd7477a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '887eab30aaec49068e3ddf5d768a313c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 776.975483] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 776.975723] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 776.975880] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.976084] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 776.976239] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.976451] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 776.976690] 
env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 776.976862] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 776.977037] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 776.977207] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 776.977379] env[69027]: DEBUG nova.virt.hardware [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 776.978249] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5269a436-37e4-4bb0-9703-a579fb658699 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.986804] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661d2825-ff14-44d3-86dd-fef3262b4436 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.299135] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Successfully created port: 5e31becc-f1af-4c23-b622-44831a7ccc04 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.394325] env[69027]: DEBUG nova.compute.manager [req-7ca2981b-f7da-451b-8970-41c6d16f2278 req-d5a0a86c-9d8e-43ec-a0bf-1208854fd136 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Received event network-vif-plugged-5e31becc-f1af-4c23-b622-44831a7ccc04 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 778.394570] env[69027]: DEBUG oslo_concurrency.lockutils [req-7ca2981b-f7da-451b-8970-41c6d16f2278 req-d5a0a86c-9d8e-43ec-a0bf-1208854fd136 service nova] Acquiring lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.394795] env[69027]: DEBUG oslo_concurrency.lockutils 
[req-7ca2981b-f7da-451b-8970-41c6d16f2278 req-d5a0a86c-9d8e-43ec-a0bf-1208854fd136 service nova] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.394906] env[69027]: DEBUG oslo_concurrency.lockutils [req-7ca2981b-f7da-451b-8970-41c6d16f2278 req-d5a0a86c-9d8e-43ec-a0bf-1208854fd136 service nova] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.395075] env[69027]: DEBUG nova.compute.manager [req-7ca2981b-f7da-451b-8970-41c6d16f2278 req-d5a0a86c-9d8e-43ec-a0bf-1208854fd136 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] No waiting events found dispatching network-vif-plugged-5e31becc-f1af-4c23-b622-44831a7ccc04 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 778.395249] env[69027]: WARNING nova.compute.manager [req-7ca2981b-f7da-451b-8970-41c6d16f2278 req-d5a0a86c-9d8e-43ec-a0bf-1208854fd136 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Received unexpected event network-vif-plugged-5e31becc-f1af-4c23-b622-44831a7ccc04 for instance with vm_state building and task_state spawning. [ 778.459184] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Successfully updated port: 5e31becc-f1af-4c23-b622-44831a7ccc04 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.472696] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "refresh_cache-362a7b3c-f0b2-46e6-a9fa-2c284a059d73" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.472858] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired lock "refresh_cache-362a7b3c-f0b2-46e6-a9fa-2c284a059d73" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.473094] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 778.533430] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 778.785596] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Updating instance_info_cache with network_info: [{"id": "5e31becc-f1af-4c23-b622-44831a7ccc04", "address": "fa:16:3e:97:0d:14", "network": {"id": "1a9f38e8-1c72-43ff-9c4d-cde922e23d6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-938644940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887eab30aaec49068e3ddf5d768a313c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e31becc-f1", "ovs_interfaceid": "5e31becc-f1af-4c23-b622-44831a7ccc04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.803121] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Releasing lock "refresh_cache-362a7b3c-f0b2-46e6-a9fa-2c284a059d73" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.803265] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Instance network_info: |[{"id": "5e31becc-f1af-4c23-b622-44831a7ccc04", "address": "fa:16:3e:97:0d:14", "network": {"id": "1a9f38e8-1c72-43ff-9c4d-cde922e23d6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-938644940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887eab30aaec49068e3ddf5d768a313c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e31becc-f1", "ovs_interfaceid": "5e31becc-f1af-4c23-b622-44831a7ccc04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 778.803682] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:0d:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '257e5ea7-8b80-4301-9900-a754f1fe2031', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e31becc-f1af-4c23-b622-44831a7ccc04', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 778.812861] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating folder: Project (887eab30aaec49068e3ddf5d768a313c). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 778.813570] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58077655-327c-4be8-9a08-3b6e0c4e1cbe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.824682] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Created folder: Project (887eab30aaec49068e3ddf5d768a313c) in parent group-v677321. [ 778.824682] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating folder: Instances. Parent ref: group-v677359. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 778.824769] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f2ad8e3-394a-4665-b253-f94c75080476 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.833824] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Created folder: Instances in parent group-v677359. [ 778.834120] env[69027]: DEBUG oslo.service.loopingcall [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.834356] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 778.834597] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78deb269-98b8-438e-9ea6-b3339cc6cedc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.856903] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 778.856903] env[69027]: value = "task-3395108" [ 778.856903] env[69027]: _type = "Task" [ 778.856903] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.872737] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395108, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.370570] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395108, 'name': CreateVM_Task, 'duration_secs': 0.295681} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.370775] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 779.371551] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.371764] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.372177] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 779.372470] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f25e93ea-a8ee-48e0-a725-8bda99a0f8fb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.377657] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 779.377657] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]521cd60e-1eaa-a951-9ceb-9be767d37040" [ 779.377657] env[69027]: _type = "Task" [ 779.377657] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.390912] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]521cd60e-1eaa-a951-9ceb-9be767d37040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.889650] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.890739] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.890739] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.537955] env[69027]: DEBUG nova.compute.manager [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Received event network-changed-5e31becc-f1af-4c23-b622-44831a7ccc04 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 780.538195] env[69027]: DEBUG nova.compute.manager [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Refreshing instance network info cache due to event network-changed-5e31becc-f1af-4c23-b622-44831a7ccc04. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 780.538395] env[69027]: DEBUG oslo_concurrency.lockutils [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] Acquiring lock "refresh_cache-362a7b3c-f0b2-46e6-a9fa-2c284a059d73" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.538540] env[69027]: DEBUG oslo_concurrency.lockutils [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] Acquired lock "refresh_cache-362a7b3c-f0b2-46e6-a9fa-2c284a059d73" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.538699] env[69027]: DEBUG nova.network.neutron [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Refreshing network info cache for port 5e31becc-f1af-4c23-b622-44831a7ccc04 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 781.327723] env[69027]: DEBUG nova.network.neutron [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Updated VIF entry in instance network info cache for port 5e31becc-f1af-4c23-b622-44831a7ccc04. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 781.328148] env[69027]: DEBUG nova.network.neutron [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Updating instance_info_cache with network_info: [{"id": "5e31becc-f1af-4c23-b622-44831a7ccc04", "address": "fa:16:3e:97:0d:14", "network": {"id": "1a9f38e8-1c72-43ff-9c4d-cde922e23d6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-938644940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887eab30aaec49068e3ddf5d768a313c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e31becc-f1", "ovs_interfaceid": "5e31becc-f1af-4c23-b622-44831a7ccc04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.337828] env[69027]: DEBUG oslo_concurrency.lockutils [req-95101a09-6e2c-42c7-8d7d-4e06c7f03999 req-eb5314a3-b777-4d16-8c29-bb1d059d89f6 service nova] Releasing lock "refresh_cache-362a7b3c-f0b2-46e6-a9fa-2c284a059d73" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.718509] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring 
lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.721443] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.185955] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "4ad409c8-465f-4106-946a-7f401358d5a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.627746] env[69027]: DEBUG oslo_concurrency.lockutils [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.471635] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "aec054f1-0d52-49be-9dee-8db0ae362f12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.251372] env[69027]: DEBUG oslo_concurrency.lockutils [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.535960] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "b6a38a84-0b95-494c-a423-3360824ed8d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.297973] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.044946] env[69027]: DEBUG oslo_concurrency.lockutils [None req-20570e4a-5654-4559-a0af-71bb04f6bbff 
tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.790644] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c8674dba-4b6c-4296-997e-d15b6498d5eb tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] Acquiring lock "405e3683-a1c1-4452-91df-4e52ebf25b65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.790644] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c8674dba-4b6c-4296-997e-d15b6498d5eb tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] Lock "405e3683-a1c1-4452-91df-4e52ebf25b65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.625336] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fed35b1c-07ef-4685-995f-10090cad0780 tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] Acquiring lock "c49c78cd-90f2-4157-8938-88492ae533ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.625336] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fed35b1c-07ef-4685-995f-10090cad0780 tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] Lock "c49c78cd-90f2-4157-8938-88492ae533ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.330099] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bae9b02a-65bc-4934-9466-0ff6649a1f7c tempest-ServersTestFqdnHostnames-901269648 tempest-ServersTestFqdnHostnames-901269648-project-member] Acquiring lock "1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.330418] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bae9b02a-65bc-4934-9466-0ff6649a1f7c tempest-ServersTestFqdnHostnames-901269648 tempest-ServersTestFqdnHostnames-901269648-project-member] Lock "1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.722850] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4dc64825-e834-4ebd-8a51-a9937616dee6 tempest-ServerShowV254Test-493597978 tempest-ServerShowV254Test-493597978-project-member] Acquiring lock "f86ab9df-e4a8-4515-81ec-a494446efa4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.723248] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4dc64825-e834-4ebd-8a51-a9937616dee6 tempest-ServerShowV254Test-493597978 tempest-ServerShowV254Test-493597978-project-member] Lock "f86ab9df-e4a8-4515-81ec-a494446efa4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.300965] env[69027]: WARNING oslo_vmware.rw_handles [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 824.300965] env[69027]: ERROR oslo_vmware.rw_handles [ 824.301549] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 824.303235] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 824.303499] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Copying Virtual Disk [datastore2] vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/956c3bd5-0c52-4d99-b47e-5bdca52ef31d/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 824.303806] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48cda687-b266-4f16-9667-24e8017fea18 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.311817] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Waiting for the task: (returnval){ [ 824.311817] env[69027]: value = "task-3395109" [ 824.311817] env[69027]: _type = "Task" [ 824.311817] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.320485] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Task: {'id': task-3395109, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.824182] env[69027]: DEBUG oslo_vmware.exceptions [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 824.824182] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.824778] env[69027]: ERROR nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 824.824778] env[69027]: Faults: ['InvalidArgument'] [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Traceback (most recent call last): [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] yield resources [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self.driver.spawn(context, instance, image_meta, [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self._vmops.spawn(context, instance, image_meta, 
injected_files, [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self._fetch_image_if_missing(context, vi) [ 824.824778] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] image_cache(vi, tmp_image_ds_loc) [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] vm_util.copy_virtual_disk( [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] session._wait_for_task(vmdk_copy_task) [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] return self.wait_for_task(task_ref) [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] return evt.wait() [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] result = hub.switch() [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 824.826553] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] return self.greenlet.switch() [ 824.826949] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 824.826949] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self.f(*self.args, **self.kw) [ 824.826949] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 824.826949] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] raise exceptions.translate_fault(task_info.error) [ 824.826949] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 824.826949] env[69027]: ERROR 
nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Faults: ['InvalidArgument'] [ 824.826949] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] [ 824.826949] env[69027]: INFO nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Terminating instance [ 824.826949] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.827227] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.827820] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 824.827820] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 824.828067] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f10eae2e-b966-4d1d-ac24-5714fefd569b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.830534] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1ef77f-111e-49ae-82b9-dbdd1e834638 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.838315] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 824.838696] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb2a1840-a409-4d3b-8bf1-64e18d75bff8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.844419] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.844419] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 824.844419] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d5456aa-3c5d-42dc-81e8-16e504bf2249 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.848452] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Waiting for the task: (returnval){ [ 824.848452] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]525653bd-04cc-b95d-8fb6-964656d32491" [ 824.848452] env[69027]: _type = "Task" [ 824.848452] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.857385] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]525653bd-04cc-b95d-8fb6-964656d32491, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.906359] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 824.906648] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 824.906845] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Deleting the datastore file [datastore2] 4ad409c8-465f-4106-946a-7f401358d5a3 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 824.907146] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bbd6edc-c1f6-48e1-9a9b-5b594de1d588 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.916049] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Waiting for the task: (returnval){ [ 824.916049] env[69027]: value = "task-3395111" [ 824.916049] env[69027]: _type = "Task" [ 824.916049] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.924987] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Task: {'id': task-3395111, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.079160] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Acquiring lock "12e2bac8-06ec-43ab-bb9d-9331789aaf10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.079720] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Lock "12e2bac8-06ec-43ab-bb9d-9331789aaf10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.366907] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 825.367225] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Creating directory with path [datastore2] vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.367484] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-116bf8e6-dc7d-4412-b476-2464fe310a9d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.379888] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Created directory with path [datastore2] vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.380109] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Fetch image to [datastore2] vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 825.380328] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 
tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 825.381192] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f59615-1474-4872-a6ff-d39cc0efda07 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.388868] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc42ae4-71be-45ce-a3b9-71f5c9b06588 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.408192] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4da2ba-c071-49d1-ba59-10dd735ef3dd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.439475] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8924859b-0d36-480d-98be-ceb7fdd49f8e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.446888] env[69027]: DEBUG oslo_vmware.api [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Task: {'id': task-3395111, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082429} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.448623] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.448973] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 825.449277] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 825.449579] env[69027]: INFO nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 825.451514] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-459cc79f-f10d-4a83-920d-98c16c473a40 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.455464] env[69027]: DEBUG nova.compute.claims [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 825.455464] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.455464] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.543208] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 825.628551] env[69027]: DEBUG oslo_vmware.rw_handles [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 825.694125] env[69027]: DEBUG oslo_vmware.rw_handles [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 825.694436] env[69027]: DEBUG oslo_vmware.rw_handles [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 826.063246] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de65ae67-505f-4eb0-8b9b-88e56c28b4fc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.077018] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5537cc54-9c7a-4bc2-a0e2-57e3a2722599 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.109347] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3304e5-6ffa-46cb-a567-4a0ce9d9fa71 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.117206] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311ed16b-7916-4965-9904-e52f33588307 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.134853] env[69027]: DEBUG nova.compute.provider_tree [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.151642] env[69027]: DEBUG nova.scheduler.client.report [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 826.175011] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.721s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.175796] env[69027]: ERROR nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 826.175796] env[69027]: Faults: ['InvalidArgument'] [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Traceback (most recent call last): [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self.driver.spawn(context, instance, image_meta, [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self._fetch_image_if_missing(context, vi) [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] image_cache(vi, tmp_image_ds_loc) [ 826.175796] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] vm_util.copy_virtual_disk( [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] session._wait_for_task(vmdk_copy_task) [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] return self.wait_for_task(task_ref) [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] return evt.wait() [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] result = hub.switch() [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] return self.greenlet.switch() [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 826.176148] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] self.f(*self.args, **self.kw) [ 826.176542] env[69027]: ERROR nova.compute.manager [instance: 
4ad409c8-465f-4106-946a-7f401358d5a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 826.176542] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] raise exceptions.translate_fault(task_info.error) [ 826.176542] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 826.176542] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Faults: ['InvalidArgument'] [ 826.176542] env[69027]: ERROR nova.compute.manager [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] [ 826.177592] env[69027]: DEBUG nova.compute.utils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 826.178820] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Build of instance 4ad409c8-465f-4106-946a-7f401358d5a3 was re-scheduled: A specified parameter was not correct: fileType [ 826.178820] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 826.179228] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 826.179431] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 826.180474] env[69027]: DEBUG nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 826.180474] env[69027]: DEBUG nova.network.neutron [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 826.938689] env[69027]: DEBUG nova.network.neutron [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.955650] env[69027]: INFO nova.compute.manager [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Took 0.78 seconds to deallocate network for instance. [ 827.091644] env[69027]: INFO nova.scheduler.client.report [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Deleted allocations for instance 4ad409c8-465f-4106-946a-7f401358d5a3 [ 827.116708] env[69027]: DEBUG oslo_concurrency.lockutils [None req-619a4596-0ca5-4848-a189-b2588fd1c87a tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "4ad409c8-465f-4106-946a-7f401358d5a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 242.240s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.119055] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "4ad409c8-465f-4106-946a-7f401358d5a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 42.933s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.119055] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Acquiring lock "4ad409c8-465f-4106-946a-7f401358d5a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.119350] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock 
"4ad409c8-465f-4106-946a-7f401358d5a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.119716] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "4ad409c8-465f-4106-946a-7f401358d5a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.122185] env[69027]: INFO nova.compute.manager [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Terminating instance [ 827.124167] env[69027]: DEBUG nova.compute.manager [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 827.124372] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 827.124643] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e12f65cf-1263-485b-aaf9-5b9661258610 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.138384] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef93565-0644-4612-80b7-4364ee94ce61 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.151937] env[69027]: DEBUG nova.compute.manager [None req-c6c11a51-8958-4c94-9726-f5611cd3f0ce tempest-ServerDiagnosticsV248Test-1395699001 tempest-ServerDiagnosticsV248Test-1395699001-project-member] [instance: a00267a5-aa1c-434f-8201-157481bc0801] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.174607] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ad409c8-465f-4106-946a-7f401358d5a3 could not be found. 
[ 827.175126] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 827.175126] env[69027]: INFO nova.compute.manager [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 827.175265] env[69027]: DEBUG oslo.service.loopingcall [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.175499] env[69027]: DEBUG nova.compute.manager [-] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 827.175593] env[69027]: DEBUG nova.network.neutron [-] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 827.189200] env[69027]: DEBUG nova.compute.manager [None req-c6c11a51-8958-4c94-9726-f5611cd3f0ce tempest-ServerDiagnosticsV248Test-1395699001 tempest-ServerDiagnosticsV248Test-1395699001-project-member] [instance: a00267a5-aa1c-434f-8201-157481bc0801] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.212930] env[69027]: DEBUG nova.network.neutron [-] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.219436] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c6c11a51-8958-4c94-9726-f5611cd3f0ce tempest-ServerDiagnosticsV248Test-1395699001 tempest-ServerDiagnosticsV248Test-1395699001-project-member] Lock "a00267a5-aa1c-434f-8201-157481bc0801" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.161s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.227034] env[69027]: INFO nova.compute.manager [-] [instance: 4ad409c8-465f-4106-946a-7f401358d5a3] Took 0.05 seconds to deallocate network for instance. [ 827.253846] env[69027]: DEBUG nova.compute.manager [None req-fd201bed-41c6-4137-a3b5-1086d66f4fd2 tempest-AttachInterfacesV270Test-947417479 tempest-AttachInterfacesV270Test-947417479-project-member] [instance: 3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.280599] env[69027]: DEBUG nova.compute.manager [None req-fd201bed-41c6-4137-a3b5-1086d66f4fd2 tempest-AttachInterfacesV270Test-947417479 tempest-AttachInterfacesV270Test-947417479-project-member] [instance: 3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.317044] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fd201bed-41c6-4137-a3b5-1086d66f4fd2 tempest-AttachInterfacesV270Test-947417479 tempest-AttachInterfacesV270Test-947417479-project-member] Lock "3aef6e75-f7bc-4a2c-aac0-daa7cfa38b87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.726s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.327767] env[69027]: DEBUG nova.compute.manager [None req-544bbf2b-93d0-4ee6-8511-b0baf9535b3d tempest-ServersWithSpecificFlavorTestJSON-443080628 tempest-ServersWithSpecificFlavorTestJSON-443080628-project-member] [instance: 45dfaf74-5be6-4c63-9efc-4717d12e3d2c] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.377711] env[69027]: DEBUG nova.compute.manager [None req-544bbf2b-93d0-4ee6-8511-b0baf9535b3d tempest-ServersWithSpecificFlavorTestJSON-443080628 tempest-ServersWithSpecificFlavorTestJSON-443080628-project-member] [instance: 45dfaf74-5be6-4c63-9efc-4717d12e3d2c] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.403365] env[69027]: DEBUG oslo_concurrency.lockutils [None req-544bbf2b-93d0-4ee6-8511-b0baf9535b3d tempest-ServersWithSpecificFlavorTestJSON-443080628 tempest-ServersWithSpecificFlavorTestJSON-443080628-project-member] Lock "45dfaf74-5be6-4c63-9efc-4717d12e3d2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.765s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.405320] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a5fc933f-e744-44d6-9708-2b5d780c27b0 tempest-FloatingIPsAssociationTestJSON-1324190010 tempest-FloatingIPsAssociationTestJSON-1324190010-project-member] Lock "4ad409c8-465f-4106-946a-7f401358d5a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.287s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.414630] env[69027]: DEBUG nova.compute.manager [None req-98532783-bb57-4e53-a10a-a7b3fe7028b4 tempest-VolumesAssistedSnapshotsTest-380461100 tempest-VolumesAssistedSnapshotsTest-380461100-project-member] [instance: b2b068c1-defd-4ee8-8611-9016924fc223] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.446406] env[69027]: DEBUG nova.compute.manager [None req-98532783-bb57-4e53-a10a-a7b3fe7028b4 tempest-VolumesAssistedSnapshotsTest-380461100 tempest-VolumesAssistedSnapshotsTest-380461100-project-member] [instance: b2b068c1-defd-4ee8-8611-9016924fc223] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.478529] env[69027]: DEBUG oslo_concurrency.lockutils [None req-98532783-bb57-4e53-a10a-a7b3fe7028b4 tempest-VolumesAssistedSnapshotsTest-380461100 tempest-VolumesAssistedSnapshotsTest-380461100-project-member] Lock "b2b068c1-defd-4ee8-8611-9016924fc223" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.870s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.495077] env[69027]: DEBUG nova.compute.manager [None req-8a80f6cf-1a19-472a-bc00-f9df7b957f08 tempest-ServersV294TestFqdnHostnames-190221613 tempest-ServersV294TestFqdnHostnames-190221613-project-member] [instance: 40937830-3e23-49ff-aeb6-cdd6b62b0614] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.541101] env[69027]: DEBUG nova.compute.manager [None req-8a80f6cf-1a19-472a-bc00-f9df7b957f08 tempest-ServersV294TestFqdnHostnames-190221613 tempest-ServersV294TestFqdnHostnames-190221613-project-member] [instance: 40937830-3e23-49ff-aeb6-cdd6b62b0614] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.563686] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8a80f6cf-1a19-472a-bc00-f9df7b957f08 tempest-ServersV294TestFqdnHostnames-190221613 tempest-ServersV294TestFqdnHostnames-190221613-project-member] Lock "40937830-3e23-49ff-aeb6-cdd6b62b0614" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.380s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.575674] env[69027]: DEBUG nova.compute.manager [None req-34b0117f-f6c1-429b-a72a-f7137c70d885 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c2497714-9a08-4ab0-9371-33060724f9d6] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.600625] env[69027]: DEBUG nova.compute.manager [None req-34b0117f-f6c1-429b-a72a-f7137c70d885 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c2497714-9a08-4ab0-9371-33060724f9d6] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.635215] env[69027]: DEBUG oslo_concurrency.lockutils [None req-34b0117f-f6c1-429b-a72a-f7137c70d885 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c2497714-9a08-4ab0-9371-33060724f9d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.233s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.645403] env[69027]: DEBUG nova.compute.manager [None req-9647e20c-5932-4cf2-b8fe-0eb20e9869d2 tempest-ServerGroupTestJSON-1038527388 tempest-ServerGroupTestJSON-1038527388-project-member] [instance: 7d2205d5-ed82-49cc-960b-b9da9584144a] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.680674] env[69027]: DEBUG nova.compute.manager [None req-9647e20c-5932-4cf2-b8fe-0eb20e9869d2 tempest-ServerGroupTestJSON-1038527388 tempest-ServerGroupTestJSON-1038527388-project-member] [instance: 7d2205d5-ed82-49cc-960b-b9da9584144a] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.714186] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9647e20c-5932-4cf2-b8fe-0eb20e9869d2 tempest-ServerGroupTestJSON-1038527388 tempest-ServerGroupTestJSON-1038527388-project-member] Lock "7d2205d5-ed82-49cc-960b-b9da9584144a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.264s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.727361] env[69027]: DEBUG nova.compute.manager [None req-a7b922fd-bb77-4038-b04c-02e33cca7cd9 tempest-InstanceActionsTestJSON-1423893352 tempest-InstanceActionsTestJSON-1423893352-project-member] [instance: ded28a13-a8c2-45aa-978e-2a1de389d958] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.761492] env[69027]: DEBUG nova.compute.manager [None req-a7b922fd-bb77-4038-b04c-02e33cca7cd9 tempest-InstanceActionsTestJSON-1423893352 tempest-InstanceActionsTestJSON-1423893352-project-member] [instance: ded28a13-a8c2-45aa-978e-2a1de389d958] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.766971] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.771156] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.771313] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 827.782092] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] There are 0 instances to clean {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 827.782527] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.782527] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances with incomplete migration {{(pid=69027) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 827.791909] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a7b922fd-bb77-4038-b04c-02e33cca7cd9 tempest-InstanceActionsTestJSON-1423893352 tempest-InstanceActionsTestJSON-1423893352-project-member] Lock "ded28a13-a8c2-45aa-978e-2a1de389d958" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.880s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.795161] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 827.804659] env[69027]: DEBUG nova.compute.manager [None req-f2e3a613-da1c-48fd-87b3-3a9b13573913 tempest-ServerActionsTestJSON-803946511 tempest-ServerActionsTestJSON-803946511-project-member] [instance: db769146-b610-4d0a-8329-b977d8450a27] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.834116] env[69027]: DEBUG nova.compute.manager [None req-f2e3a613-da1c-48fd-87b3-3a9b13573913 tempest-ServerActionsTestJSON-803946511 tempest-ServerActionsTestJSON-803946511-project-member] [instance: db769146-b610-4d0a-8329-b977d8450a27] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.871195] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f2e3a613-da1c-48fd-87b3-3a9b13573913 tempest-ServerActionsTestJSON-803946511 tempest-ServerActionsTestJSON-803946511-project-member] Lock "db769146-b610-4d0a-8329-b977d8450a27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.752s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.883030] env[69027]: DEBUG nova.compute.manager [None req-0f3af6e4-7f5d-4508-82e8-78d67eed5f9d tempest-ImagesNegativeTestJSON-1075311325 tempest-ImagesNegativeTestJSON-1075311325-project-member] [instance: 0270a5ec-007f-4f7a-a6c6-05163c052452] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.920530] env[69027]: DEBUG nova.compute.manager [None req-0f3af6e4-7f5d-4508-82e8-78d67eed5f9d tempest-ImagesNegativeTestJSON-1075311325 tempest-ImagesNegativeTestJSON-1075311325-project-member] [instance: 0270a5ec-007f-4f7a-a6c6-05163c052452] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 827.948627] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0f3af6e4-7f5d-4508-82e8-78d67eed5f9d tempest-ImagesNegativeTestJSON-1075311325 tempest-ImagesNegativeTestJSON-1075311325-project-member] Lock "0270a5ec-007f-4f7a-a6c6-05163c052452" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.722s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.967995] env[69027]: DEBUG nova.compute.manager [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] [instance: 0522ed67-caaf-4018-b35f-252a6bbd2644] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 827.999442] env[69027]: DEBUG nova.compute.manager [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] [instance: 0522ed67-caaf-4018-b35f-252a6bbd2644] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 828.026266] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Lock "0522ed67-caaf-4018-b35f-252a6bbd2644" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.818s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.041522] env[69027]: DEBUG nova.compute.manager [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] [instance: d14a7134-97a4-47e1-a49c-84e0189ecfec] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 828.079075] env[69027]: DEBUG nova.compute.manager [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] [instance: d14a7134-97a4-47e1-a49c-84e0189ecfec] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 828.119515] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Lock "d14a7134-97a4-47e1-a49c-84e0189ecfec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.875s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.134612] env[69027]: DEBUG nova.compute.manager [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] [instance: d2ae8743-b98e-403e-acc3-1be0eda5825b] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 828.163058] env[69027]: DEBUG nova.compute.manager [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] [instance: d2ae8743-b98e-403e-acc3-1be0eda5825b] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 828.198576] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9ad9d5aa-496e-4109-847d-ce2e5a052f9e tempest-ListServersNegativeTestJSON-227036224 tempest-ListServersNegativeTestJSON-227036224-project-member] Lock "d2ae8743-b98e-403e-acc3-1be0eda5825b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.901s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.212394] env[69027]: DEBUG nova.compute.manager [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 828.285186] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.285186] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.285186] env[69027]: INFO nova.compute.claims [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.522284] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquiring lock "887fbbeb-c981-4cc3-94e6-c232774507c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.805051] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3a15f5-936b-43a5-8928-41aaeee66081 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.813403] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaeaa31f-80dd-4678-8099-98267977dfee {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.847824] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e785915b-1996-420e-9aa4-0dfde208a0b0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.853520] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] 
Acquiring lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.853783] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.859047] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e235de-ba08-4e24-a442-26cbf6609a32 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.873098] env[69027]: DEBUG nova.compute.provider_tree [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.883209] env[69027]: DEBUG nova.scheduler.client.report [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 828.898420] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.615s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.899165] env[69027]: DEBUG nova.compute.manager [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 828.942310] env[69027]: DEBUG nova.compute.claims [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 828.942524] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.942749] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.391430] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3458a3-2e95-4cb4-a7e4-b3e23769a2b8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.399800] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba720fc3-c581-49cb-a13a-0b6f4b67c23c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.432480] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e3e2ea-e21f-4d8e-89cd-df64dc29f7ae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.439801] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8b7308-e6ed-4236-94c5-3f70237a7e97 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.452658] env[69027]: DEBUG nova.compute.provider_tree [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.466516] env[69027]: DEBUG nova.scheduler.client.report [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 829.494988] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.550s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.494988] env[69027]: DEBUG nova.compute.utils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Conflict updating instance 887fbbeb-c981-4cc3-94e6-c232774507c7. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 829.496527] env[69027]: DEBUG nova.compute.manager [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Instance disappeared during build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 829.496886] env[69027]: DEBUG nova.compute.manager [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 829.497237] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquiring lock "refresh_cache-887fbbeb-c981-4cc3-94e6-c232774507c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 829.497540] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquired lock "refresh_cache-887fbbeb-c981-4cc3-94e6-c232774507c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.497819] env[69027]: DEBUG nova.network.neutron [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 829.552138] env[69027]: DEBUG nova.network.neutron [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.801751] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.801974] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 829.802119] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 829.827365] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.827566] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.827657] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.827782] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.827903] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.828033] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.828155] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.828273] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.828389] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 829.828562] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 829.829022] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 829.838856] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.839083] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.839256] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.839437] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 829.840521] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5786c97b-d3fe-4f5e-8bd9-49b91340a645 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.849310] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5335a29-dbaa-4465-8d42-03e16b95100e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.870100] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610c19da-d7e1-40af-a14a-138e1f5a93b8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.876838] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7cce45-2435-46bd-b232-b9f2bd07f0ff {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.910222] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180983MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 829.910806] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.910806] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.005930] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ae5e2ca1-75e2-4023-b297-4cc265f038e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006116] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ff1dac6-b328-42c3-babe-86aef27466c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006250] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance aec054f1-0d52-49be-9dee-8db0ae362f12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006415] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006487] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006601] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006723] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006833] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.006948] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 830.020094] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 887fbbeb-c981-4cc3-94e6-c232774507c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.034721] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4927121e-a71e-47e9-9475-603096e82492 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.041987] env[69027]: DEBUG nova.network.neutron [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.054330] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.055911] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Releasing lock "refresh_cache-887fbbeb-c981-4cc3-94e6-c232774507c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.055911] env[69027]: DEBUG nova.compute.manager [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 830.056191] env[69027]: DEBUG nova.compute.manager [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 830.056312] env[69027]: DEBUG nova.network.neutron [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.066384] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbc37887-513b-4b67-915f-e6862ea585e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.088174] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c42ce6eb-a29d-4e16-b5e6-ee507bd58819 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.103353] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 59c731c3-8604-481b-b761-29a5251411f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.116925] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance caee2e65-98e5-424d-8dd8-057732b921fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.128359] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 695f7335-8e1d-4d7f-a377-8666b12e30f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.146392] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.161171] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.175752] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.188550] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 405e3683-a1c1-4452-91df-4e52ebf25b65 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.201307] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c49c78cd-90f2-4157-8938-88492ae533ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.217551] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.232474] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f86ab9df-e4a8-4515-81ec-a494446efa4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.245886] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 12e2bac8-06ec-43ab-bb9d-9331789aaf10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.260370] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 830.260370] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 830.260370] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 830.375203] env[69027]: DEBUG nova.network.neutron [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.389931] env[69027]: DEBUG nova.network.neutron [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.411028] env[69027]: INFO nova.compute.manager [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Took 0.35 seconds to deallocate network for instance. 
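Aside: the resource-tracker audit above lists nine active allocations (eight at 128 MB, one at 192 MB) plus the 512 MB reserved in the MEMORY_MB inventory, which matches the final resource view of used_ram=1728MB, used_disk=9GB, used_vcpus=9. A minimal sketch reproducing that arithmetic from the logged values; the capacity formula (total - reserved) * allocation_ratio is how placement normally derives usable capacity and is stated here as an assumption:

```python
# Illustrative sketch only: re-derive the totals in the final resource view
# from the per-instance allocations and inventory values shown in the log.
allocations = [
    {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1},   # eight instances at 128 MB
] * 8 + [
    {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},   # d8db9bfa-... at 192 MB
]

reserved_ram_mb = 512  # 'reserved' in the MEMORY_MB inventory above

used_ram = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)
used_disk = sum(a["DISK_GB"] for a in allocations)
used_vcpus = sum(a["VCPU"] for a in allocations)
print(used_ram, used_disk, used_vcpus)  # 1728 9 9, matching the final resource view

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    # Assumed placement capacity formula: (total - reserved) * allocation_ratio.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```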
[ 830.493719] env[69027]: INFO nova.scheduler.client.report [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Deleted allocations for instance 887fbbeb-c981-4cc3-94e6-c232774507c7 [ 830.494594] env[69027]: DEBUG oslo_concurrency.lockutils [None req-912c1dab-ae88-449d-9217-192cf60daadb tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "887fbbeb-c981-4cc3-94e6-c232774507c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.330s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.495791] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "887fbbeb-c981-4cc3-94e6-c232774507c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.975s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.496665] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquiring lock "887fbbeb-c981-4cc3-94e6-c232774507c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.497103] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "887fbbeb-c981-4cc3-94e6-c232774507c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.497207] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "887fbbeb-c981-4cc3-94e6-c232774507c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.502188] env[69027]: INFO nova.compute.manager [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Terminating instance [ 830.507608] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquiring lock "refresh_cache-887fbbeb-c981-4cc3-94e6-c232774507c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.507936] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Acquired lock "refresh_cache-887fbbeb-c981-4cc3-94e6-c232774507c7" {{(pid=69027) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.508221] env[69027]: DEBUG nova.network.neutron [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 830.511118] env[69027]: DEBUG nova.compute.manager [None req-f0a8815b-8f4c-415f-8508-3ef4c6746492 tempest-ServerShowV257Test-1857541781 tempest-ServerShowV257Test-1857541781-project-member] [instance: 8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 830.542985] env[69027]: DEBUG nova.compute.manager [None req-f0a8815b-8f4c-415f-8508-3ef4c6746492 tempest-ServerShowV257Test-1857541781 tempest-ServerShowV257Test-1857541781-project-member] [instance: 8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 830.565964] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f0a8815b-8f4c-415f-8508-3ef4c6746492 tempest-ServerShowV257Test-1857541781 tempest-ServerShowV257Test-1857541781-project-member] Lock "8e40eb4c-9c1e-4147-beb3-0d5fa311b9d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.667s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.577113] env[69027]: DEBUG nova.compute.manager [None req-fe2ba5b9-0e5c-420f-b361-523d31e16a2d tempest-ImagesOneServerTestJSON-251492339 tempest-ImagesOneServerTestJSON-251492339-project-member] [instance: 918f38e1-e1b4-40d7-a79c-49257a814941] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 830.593673] env[69027]: DEBUG nova.network.neutron [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.606436] env[69027]: DEBUG nova.compute.manager [None req-fe2ba5b9-0e5c-420f-b361-523d31e16a2d tempest-ImagesOneServerTestJSON-251492339 tempest-ImagesOneServerTestJSON-251492339-project-member] [instance: 918f38e1-e1b4-40d7-a79c-49257a814941] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 830.640488] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fe2ba5b9-0e5c-420f-b361-523d31e16a2d tempest-ImagesOneServerTestJSON-251492339 tempest-ImagesOneServerTestJSON-251492339-project-member] Lock "918f38e1-e1b4-40d7-a79c-49257a814941" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.449s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.653150] env[69027]: DEBUG nova.compute.manager [None req-361b5fc3-c112-431b-8930-ecb8c4a23db8 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006-project-member] [instance: 5bdb4d11-9125-4c28-89b0-8fd9147e0c0f] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 830.684545] env[69027]: DEBUG nova.compute.manager [None req-361b5fc3-c112-431b-8930-ecb8c4a23db8 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006-project-member] [instance: 5bdb4d11-9125-4c28-89b0-8fd9147e0c0f] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 830.706443] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd5b48b-0fd5-4db6-8079-4a731ced987b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.713527] env[69027]: DEBUG oslo_concurrency.lockutils [None req-361b5fc3-c112-431b-8930-ecb8c4a23db8 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006 tempest-FloatingIPsAssociationNegativeTestJSON-1991375006-project-member] Lock "5bdb4d11-9125-4c28-89b0-8fd9147e0c0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.353s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.718564] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3d6e41-8fe3-4190-83a3-d65e0b49db92 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.726910] env[69027]: DEBUG nova.compute.manager [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 830.758719] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff844eb9-a752-4622-be10-de7b65247e38 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.768496] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b7e0f6-d955-4315-8b08-8f211fde5e8d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.789528] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.791339] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.799135] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 830.815378] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 830.815378] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.905s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.815773] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.024s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.817240] env[69027]: INFO nova.compute.claims [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.110369] env[69027]: DEBUG nova.network.neutron [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 
tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.121737] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Releasing lock "refresh_cache-887fbbeb-c981-4cc3-94e6-c232774507c7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.122181] env[69027]: DEBUG nova.compute.manager [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 831.122380] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 831.122953] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8286871-bb11-410a-87e2-27a977f1b673 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.134153] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c401015-7393-411a-b9d2-62a51356a468 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.170993] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 887fbbeb-c981-4cc3-94e6-c232774507c7 could not be found. [ 831.170993] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 831.170993] env[69027]: INFO nova.compute.manager [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 831.170993] env[69027]: DEBUG oslo.service.loopingcall [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
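
Annotation: the oslo.service loopingcall record just above shows the terminate path waiting for `_deallocate_network_with_retries` to return before it reports the network as deallocated. Purely as an illustration of that wrap-a-call-in-retries idea (not the Nova/oslo.service implementation; the attempt count, delay, and `neutron_api` name below are made up):

```python
import time

def call_with_retries(func, attempts=3, delay=1.0):
    """Retry a deallocation call a few times before giving up.
    Illustrative only: Nova drives this through an oslo.service loopingcall."""
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except Exception as exc:  # Nova would narrow this to specific network errors
            if attempt == attempts:
                raise
            print(f"deallocate attempt {attempt} failed ({exc!r}); retrying in {delay}s")
            time.sleep(delay)

# call_with_retries(lambda: neutron_api.deallocate_for_instance(ctx, instance))
```
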
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 831.170993] env[69027]: DEBUG nova.compute.manager [-] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 831.171200] env[69027]: DEBUG nova.network.neutron [-] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 831.209748] env[69027]: DEBUG nova.network.neutron [-] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.219576] env[69027]: DEBUG nova.network.neutron [-] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.237822] env[69027]: INFO nova.compute.manager [-] [instance: 887fbbeb-c981-4cc3-94e6-c232774507c7] Took 0.07 seconds to deallocate network for instance. [ 831.333852] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquiring lock "4927121e-a71e-47e9-9475-603096e82492" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.383704] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9064822b-9f5a-49a3-a833-3d9d3b98ff09 tempest-ServersTestManualDisk-892053454 tempest-ServersTestManualDisk-892053454-project-member] Lock "887fbbeb-c981-4cc3-94e6-c232774507c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.887s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.451975] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de62eb0b-f818-4bbb-a0fd-e551af4d31a7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.459704] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1edb88-45dc-46ab-ad50-3e5e944086bd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.495231] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3da1cd-8964-47a0-8f29-98541e5233db {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.504861] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afd8ea1-aac4-40cf-919a-9fb00b6a30d4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.527335] env[69027]: DEBUG nova.compute.provider_tree [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.543048] env[69027]: DEBUG nova.scheduler.client.report [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 831.563989] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.746s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.563989] env[69027]: DEBUG nova.compute.manager [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 831.616823] env[69027]: DEBUG nova.compute.claims [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 831.616823] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.616823] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.764732] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.770785] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.771976] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 831.771976] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 831.859059] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d849cff0-e8e6-4113-ad53-ca385754edb9 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "40b03034-9a4d-4c60-9847-9e24963b0d0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.859320] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d849cff0-e8e6-4113-ad53-ca385754edb9 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "40b03034-9a4d-4c60-9847-9e24963b0d0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.091947] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64c0487-ab5e-4ee7-beff-486cf9712fc2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.099868] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684975e2-06d7-4c76-8ced-d187662f27cd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.135929] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d4e287-2f1b-4a48-99bd-0ed58569d4bc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.144412] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3057a3-9f74-4c2c-ab74-bd4ae11f7f7a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.160489] env[69027]: DEBUG nova.compute.provider_tree [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.173912] env[69027]: DEBUG nova.scheduler.client.report [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 832.200594] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.585s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.201563] env[69027]: DEBUG nova.compute.utils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Conflict updating instance 4927121e-a71e-47e9-9475-603096e82492. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 832.203388] env[69027]: DEBUG nova.compute.manager [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Instance disappeared during build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 832.205455] env[69027]: DEBUG nova.compute.manager [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 832.205455] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquiring lock "refresh_cache-4927121e-a71e-47e9-9475-603096e82492" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.205455] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquired lock "refresh_cache-4927121e-a71e-47e9-9475-603096e82492" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.205455] env[69027]: DEBUG nova.network.neutron [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 832.277320] env[69027]: DEBUG nova.network.neutron [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Instance cache missing network info. 
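
Annotation: the nova.compute.utils record above, "Conflict updating instance 4927121e-... Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'}", is the build request losing a guarded update: it only proceeds while task_state is still unset, and the concurrent delete has already moved it to 'deleting', so the build aborts its claim and logs "Instance disappeared during build." A generic sketch of that expected-vs-actual guard (not the Nova instance object API):

```python
class ConflictError(Exception):
    pass

def guarded_update(record: dict, expected_task_state, **changes):
    """Apply changes only if task_state still matches what the caller expects;
    otherwise raise, so the caller can treat the instance as gone or changed."""
    if record.get("task_state") not in expected_task_state:
        raise ConflictError(
            f"Expected: {{'task_state': {list(expected_task_state)}}}. "
            f"Actual: {{'task_state': {record.get('task_state')!r}}}"
        )
    record.update(changes)

instance = {"uuid": "4927121e-...", "task_state": "deleting"}
try:
    guarded_update(instance, expected_task_state=[None], task_state="spawning")
except ConflictError as exc:
    print(exc)  # mirrors the conflict the build request logged above
```
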
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.771528] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.771975] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 832.939185] env[69027]: DEBUG nova.network.neutron [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.950912] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Releasing lock "refresh_cache-4927121e-a71e-47e9-9475-603096e82492" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.954122] env[69027]: DEBUG nova.compute.manager [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 832.954379] env[69027]: DEBUG nova.compute.manager [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 832.954567] env[69027]: DEBUG nova.network.neutron [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.030462] env[69027]: DEBUG nova.network.neutron [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.041943] env[69027]: DEBUG nova.network.neutron [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.057570] env[69027]: INFO nova.compute.manager [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Took 0.10 seconds to deallocate network for instance. [ 833.214432] env[69027]: INFO nova.scheduler.client.report [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Deleted allocations for instance 4927121e-a71e-47e9-9475-603096e82492 [ 833.214432] env[69027]: DEBUG oslo_concurrency.lockutils [None req-87d8db21-8f95-4e90-bd6b-89bad7683a3a tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "4927121e-a71e-47e9-9475-603096e82492" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.087s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.214432] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "4927121e-a71e-47e9-9475-603096e82492" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.881s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.214432] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquiring lock "4927121e-a71e-47e9-9475-603096e82492-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.214643] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "4927121e-a71e-47e9-9475-603096e82492-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.215433] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "4927121e-a71e-47e9-9475-603096e82492-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.218214] env[69027]: INFO nova.compute.manager [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 
tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Terminating instance [ 833.220385] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquiring lock "refresh_cache-4927121e-a71e-47e9-9475-603096e82492" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.224281] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Acquired lock "refresh_cache-4927121e-a71e-47e9-9475-603096e82492" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.224281] env[69027]: DEBUG nova.network.neutron [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.226888] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 833.284926] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.285423] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.286930] env[69027]: INFO nova.compute.claims [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.294400] env[69027]: DEBUG nova.network.neutron [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Instance cache missing network info. 
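
Annotation: the lockutils records throughout this span follow one pattern: a request acquires a named lock (per-instance UUID, "<uuid>-events", "compute_resources", "refresh_cache-<uuid>"), the acquire line reports how long it waited, and the release line reports how long it was held (e.g. the build lock on 4927121e-... held 201.087s while the terminate request waited 1.881s). A minimal standard-library illustration of that instrumentation (not oslo.concurrency itself):

```python
import threading
import time
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}

@contextmanager
def timed_lock(name: str, by: str):
    """Acquire a named lock and report waited/held durations, mimicking the
    'acquired ... waited N.NNNs' / '"released" ... held N.NNNs' records above."""
    lock = _locks.setdefault(name, threading.Lock())
    started = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - started
    print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

# e.g. with timed_lock("compute_resources", "instance_claim"): ...
```
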
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.366050] env[69027]: DEBUG nova.scheduler.client.report [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Refreshing inventories for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 833.385661] env[69027]: DEBUG nova.scheduler.client.report [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Updating ProviderTree inventory for provider 4923c91f-3b2b-4ad1-a821-36209acae639 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 833.385794] env[69027]: DEBUG nova.compute.provider_tree [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 833.398320] env[69027]: DEBUG nova.scheduler.client.report [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Refreshing aggregate associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, aggregates: None {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 833.421859] env[69027]: DEBUG nova.scheduler.client.report [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Refreshing trait associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 833.657130] env[69027]: DEBUG nova.network.neutron [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.676100] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Releasing lock 
"refresh_cache-4927121e-a71e-47e9-9475-603096e82492" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.677738] env[69027]: DEBUG nova.compute.manager [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 833.677967] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 833.679045] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9321052f-4115-4302-a063-783a487331a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.699023] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce32f8a-621d-4836-8b97-691c97242396 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.735624] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4927121e-a71e-47e9-9475-603096e82492 could not be found. [ 833.735815] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 833.736435] env[69027]: INFO nova.compute.manager [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] [instance: 4927121e-a71e-47e9-9475-603096e82492] Took 0.06 seconds to destroy the instance on the hypervisor. [ 833.736435] env[69027]: DEBUG oslo.service.loopingcall [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 833.736681] env[69027]: DEBUG nova.compute.manager [-] [instance: 4927121e-a71e-47e9-9475-603096e82492] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 833.737039] env[69027]: DEBUG nova.network.neutron [-] [instance: 4927121e-a71e-47e9-9475-603096e82492] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 833.774221] env[69027]: DEBUG nova.network.neutron [-] [instance: 4927121e-a71e-47e9-9475-603096e82492] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.785764] env[69027]: DEBUG nova.network.neutron [-] [instance: 4927121e-a71e-47e9-9475-603096e82492] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.796410] env[69027]: INFO nova.compute.manager [-] [instance: 4927121e-a71e-47e9-9475-603096e82492] Took 0.06 seconds to deallocate network for instance. [ 833.887519] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8da1179-56c2-4773-a53a-0b202aa31e59 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.896954] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33279e60-521f-4c1e-b0d5-81f602650a99 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.937564] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b5b36c-545c-4b77-a386-2b6da8ac9781 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.942075] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0772c27b-8e67-4d0a-aba6-4178d1c2b8ce tempest-ServerActionsTestOtherA-312268694 tempest-ServerActionsTestOtherA-312268694-project-member] Lock "4927121e-a71e-47e9-9475-603096e82492" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.727s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.949285] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12047d6-548f-4411-b7a8-11c7df05a0b4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.972671] env[69027]: DEBUG nova.compute.provider_tree [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.986721] env[69027]: DEBUG nova.scheduler.client.report [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 834.004776] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.719s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.005386] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 834.061204] env[69027]: DEBUG nova.compute.utils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.067178] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 834.067336] env[69027]: DEBUG nova.network.neutron [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 834.078182] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 834.159288] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 834.201445] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.201902] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.202813] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.202813] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.202813] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.202813] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.203721] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.203721] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.203901] env[69027]: DEBUG nova.virt.hardware [None 
req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.205827] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.205827] env[69027]: DEBUG nova.virt.hardware [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.205827] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72427813-22bf-44da-84ab-3c03f90d6539 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.215939] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31738cbf-2251-4459-8249-0bd64444ca0c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.628131] env[69027]: DEBUG nova.policy [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '263b37939f904065bde47292c55d640a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9819eb55e5114a789e4afe3cfab978d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 834.775807] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.156052] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.236956] env[69027]: DEBUG nova.network.neutron [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Successfully created port: 05101b58-3f03-4460-a462-941d5e97a04f {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.488641] env[69027]: DEBUG nova.network.neutron [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 
tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Successfully updated port: 05101b58-3f03-4460-a462-941d5e97a04f {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.509339] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.511009] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquired lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.511009] env[69027]: DEBUG nova.network.neutron [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.608770] env[69027]: DEBUG nova.network.neutron [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 841.155581] env[69027]: DEBUG nova.network.neutron [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Updating instance_info_cache with network_info: [{"id": "05101b58-3f03-4460-a462-941d5e97a04f", "address": "fa:16:3e:be:21:e0", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05101b58-3f", "ovs_interfaceid": "05101b58-3f03-4460-a462-941d5e97a04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.174674] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Releasing lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.174986] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Instance network_info: |[{"id": "05101b58-3f03-4460-a462-941d5e97a04f", "address": "fa:16:3e:be:21:e0", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05101b58-3f", "ovs_interfaceid": "05101b58-3f03-4460-a462-941d5e97a04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 841.175409] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:21:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05101b58-3f03-4460-a462-941d5e97a04f', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.188038] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Creating folder: Project (9819eb55e5114a789e4afe3cfab978d9). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 841.188510] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04eda6da-c9e2-46df-9bb4-69059bc5eff6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.200922] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Created folder: Project (9819eb55e5114a789e4afe3cfab978d9) in parent group-v677321. [ 841.201171] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Creating folder: Instances. Parent ref: group-v677362. 
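
Annotation: the network_info cache entry and the "Instance VIF info" record above carry the same handful of fields: the port id, MAC address, the NSX logical-switch id under "details", the fixed IP, and the devname. A small sketch pulling those out of one such entry (key names copied from the JSON logged above):

```python
def summarize_vif(entry: dict) -> dict:
    """Extract the fields the vmwareapi VIF info logged above is built from."""
    subnet = entry["network"]["subnets"][0]
    return {
        "iface_id": entry["id"],
        "mac_address": entry["address"],
        "network_ref": entry["details"]["nsx-logical-switch-id"],
        "fixed_ip": subnet["ips"][0]["address"],
        "devname": entry["devname"],
    }

# With the entry from the log this yields:
# {'iface_id': '05101b58-3f03-4460-a462-941d5e97a04f',
#  'mac_address': 'fa:16:3e:be:21:e0',
#  'network_ref': '78fd2e0c-4fd2-4d81-8780-aa94237670c0',
#  'fixed_ip': '192.168.233.170',
#  'devname': 'tap05101b58-3f'}
```
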
{{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 841.201416] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91329606-1133-4576-8600-bb1c5de139dd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.213805] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Created folder: Instances in parent group-v677362. [ 841.213805] env[69027]: DEBUG oslo.service.loopingcall [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.213805] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 841.213805] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-642bb8b4-271b-402a-9279-a2364f8a5597 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.235923] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.235923] env[69027]: value = "task-3395114" [ 841.235923] env[69027]: _type = "Task" [ 841.235923] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.244919] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395114, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.300666] env[69027]: DEBUG nova.compute.manager [req-c0b8cebb-224c-4f13-82e5-e0e30b689ef9 req-ae721cb1-b6ce-4bf4-a02d-697d8fbc8168 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Received event network-vif-plugged-05101b58-3f03-4460-a462-941d5e97a04f {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 841.301257] env[69027]: DEBUG oslo_concurrency.lockutils [req-c0b8cebb-224c-4f13-82e5-e0e30b689ef9 req-ae721cb1-b6ce-4bf4-a02d-697d8fbc8168 service nova] Acquiring lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.301832] env[69027]: DEBUG oslo_concurrency.lockutils [req-c0b8cebb-224c-4f13-82e5-e0e30b689ef9 req-ae721cb1-b6ce-4bf4-a02d-697d8fbc8168 service nova] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.301832] env[69027]: DEBUG oslo_concurrency.lockutils [req-c0b8cebb-224c-4f13-82e5-e0e30b689ef9 req-ae721cb1-b6ce-4bf4-a02d-697d8fbc8168 service nova] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.301993] env[69027]: DEBUG nova.compute.manager [req-c0b8cebb-224c-4f13-82e5-e0e30b689ef9 req-ae721cb1-b6ce-4bf4-a02d-697d8fbc8168 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] No waiting events found dispatching network-vif-plugged-05101b58-3f03-4460-a462-941d5e97a04f {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 841.302237] env[69027]: WARNING nova.compute.manager [req-c0b8cebb-224c-4f13-82e5-e0e30b689ef9 req-ae721cb1-b6ce-4bf4-a02d-697d8fbc8168 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Received unexpected event network-vif-plugged-05101b58-3f03-4460-a462-941d5e97a04f for instance with vm_state building and task_state deleting. [ 841.748093] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395114, 'name': CreateVM_Task, 'duration_secs': 0.296746} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.748452] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 841.749096] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.749096] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.749462] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.749726] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30699d5e-2327-4071-855b-7d978623c5bc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.755229] env[69027]: DEBUG oslo_vmware.api [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Waiting for the task: (returnval){ [ 841.755229] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52c44707-4113-5fb9-f710-827addde89fc" [ 841.755229] env[69027]: _type = "Task" [ 841.755229] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.763681] env[69027]: DEBUG oslo_vmware.api [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52c44707-4113-5fb9-f710-827addde89fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.273818] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.274661] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.274661] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.601926] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9fadb744-1482-4829-a343-0abe932ebe3c tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "b2a14f3e-3920-4c24-96bc-e11cffc4ad57" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.605223] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9fadb744-1482-4829-a343-0abe932ebe3c tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "b2a14f3e-3920-4c24-96bc-e11cffc4ad57" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.684673] env[69027]: DEBUG nova.compute.manager [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Received event network-changed-05101b58-3f03-4460-a462-941d5e97a04f {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 844.684673] env[69027]: DEBUG nova.compute.manager [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Refreshing instance network info cache due to event network-changed-05101b58-3f03-4460-a462-941d5e97a04f. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 844.684673] env[69027]: DEBUG oslo_concurrency.lockutils [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] Acquiring lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.684673] env[69027]: DEBUG oslo_concurrency.lockutils [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] Acquired lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.684673] env[69027]: DEBUG nova.network.neutron [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Refreshing network info cache for port 05101b58-3f03-4460-a462-941d5e97a04f {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 846.406365] env[69027]: DEBUG nova.network.neutron [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Updated VIF entry in instance network info cache for port 05101b58-3f03-4460-a462-941d5e97a04f. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 846.408220] env[69027]: DEBUG nova.network.neutron [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Updating instance_info_cache with network_info: [{"id": "05101b58-3f03-4460-a462-941d5e97a04f", "address": "fa:16:3e:be:21:e0", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05101b58-3f", "ovs_interfaceid": "05101b58-3f03-4460-a462-941d5e97a04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.418412] env[69027]: DEBUG oslo_concurrency.lockutils [req-d59d4806-e347-43dc-8fb9-470e677ea51c req-5e71165a-4e84-493b-80f8-6d2a12c5e970 service nova] Releasing lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.092422] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Acquiring lock "ac96fbcc-59d8-4625-a705-14410e0beec3" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.092855] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "ac96fbcc-59d8-4625-a705-14410e0beec3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.122191] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Acquiring lock "8c97744d-c2ff-477a-a973-d90d6b526559" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.122436] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "8c97744d-c2ff-477a-a973-d90d6b526559" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.212410] env[69027]: DEBUG oslo_concurrency.lockutils [None req-cf1eaba9-0715-4331-8f2c-4b67d539a3df tempest-ServersAaction247Test-332079165 tempest-ServersAaction247Test-332079165-project-member] Acquiring lock "6aa59a08-032a-4de9-8fef-cef1b176a046" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.213046] env[69027]: DEBUG oslo_concurrency.lockutils [None req-cf1eaba9-0715-4331-8f2c-4b67d539a3df tempest-ServersAaction247Test-332079165 tempest-ServersAaction247Test-332079165-project-member] Lock "6aa59a08-032a-4de9-8fef-cef1b176a046" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.705179] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e55d8d76-3f37-4538-9b49-c361368c032e tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Acquiring lock "9cd6b8ee-27a8-4535-9550-29dd51fca73c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.705436] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e55d8d76-3f37-4538-9b49-c361368c032e tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Lock "9cd6b8ee-27a8-4535-9550-29dd51fca73c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.093667] env[69027]: DEBUG 
oslo_concurrency.lockutils [None req-391beaea-9bb3-42e4-a081-0f07634a8349 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] Acquiring lock "9ab15458-940f-490d-9aae-858f9f928a80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.094110] env[69027]: DEBUG oslo_concurrency.lockutils [None req-391beaea-9bb3-42e4-a081-0f07634a8349 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] Lock "9ab15458-940f-490d-9aae-858f9f928a80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.972657] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f9a5890f-eff8-4ee2-b7d0-12770a9b5cc7 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "ed1fc438-2682-405c-94f3-42b8db784c47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.972657] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f9a5890f-eff8-4ee2-b7d0-12770a9b5cc7 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "ed1fc438-2682-405c-94f3-42b8db784c47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.705443] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1ea9afe-f45d-4f09-ac2e-08d22b648973 tempest-ServerActionsTestOtherB-530383552 tempest-ServerActionsTestOtherB-530383552-project-member] Acquiring lock "f6a3e604-e5be-4633-91b0-d8790cc4b810" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.705675] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1ea9afe-f45d-4f09-ac2e-08d22b648973 tempest-ServerActionsTestOtherB-530383552 tempest-ServerActionsTestOtherB-530383552-project-member] Lock "f6a3e604-e5be-4633-91b0-d8790cc4b810" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.315521] env[69027]: DEBUG oslo_concurrency.lockutils [None req-21d37828-0151-4169-a4c9-01288948fcfe tempest-InstanceActionsV221TestJSON-1059247159 tempest-InstanceActionsV221TestJSON-1059247159-project-member] Acquiring lock "68bd33f5-18fb-4ab5-8b23-98c2a94ec36a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.315521] env[69027]: DEBUG oslo_concurrency.lockutils [None req-21d37828-0151-4169-a4c9-01288948fcfe tempest-InstanceActionsV221TestJSON-1059247159 tempest-InstanceActionsV221TestJSON-1059247159-project-member] Lock "68bd33f5-18fb-4ab5-8b23-98c2a94ec36a" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.946474] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f282a93c-7498-491a-be03-e364795bff7d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "74bb614d-40ee-4e2b-8085-8351f85fe1ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.946917] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f282a93c-7498-491a-be03-e364795bff7d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "74bb614d-40ee-4e2b-8085-8351f85fe1ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.095278] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2fb33b5d-c20f-441c-8ca2-5e9661477483 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Acquiring lock "51ec5aa4-027a-4f24-acac-1b6933e679de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.095607] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2fb33b5d-c20f-441c-8ca2-5e9661477483 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Lock "51ec5aa4-027a-4f24-acac-1b6933e679de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.063079] env[69027]: WARNING oslo_vmware.rw_handles [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed 
connection without response [ 874.063079] env[69027]: ERROR oslo_vmware.rw_handles [ 874.063696] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 874.065336] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 874.065586] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Copying Virtual Disk [datastore2] vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/aadd63a4-baca-4c54-a822-150a09c380bb/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 874.065874] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-525a2f3d-8303-4288-9ff3-dc0e44e1a161 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.075099] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Waiting for the task: (returnval){ [ 874.075099] env[69027]: value = "task-3395115" [ 874.075099] env[69027]: _type = "Task" [ 874.075099] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.085015] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Task: {'id': task-3395115, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.585525] env[69027]: DEBUG oslo_vmware.exceptions [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 874.585850] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.586580] env[69027]: ERROR nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 874.586580] env[69027]: Faults: ['InvalidArgument'] [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Traceback (most recent call last): [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] yield resources [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self.driver.spawn(context, instance, image_meta, [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self._fetch_image_if_missing(context, vi) [ 874.586580] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] image_cache(vi, tmp_image_ds_loc) [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] vm_util.copy_virtual_disk( [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] session._wait_for_task(vmdk_copy_task) [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] return self.wait_for_task(task_ref) [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] return evt.wait() [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] result = hub.switch() [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 874.586963] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] return self.greenlet.switch() [ 874.587373] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 874.587373] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self.f(*self.args, **self.kw) [ 874.587373] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 874.587373] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] raise exceptions.translate_fault(task_info.error) [ 874.587373] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 874.587373] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Faults: ['InvalidArgument'] [ 874.587373] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] [ 874.587373] env[69027]: INFO nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Terminating instance [ 874.588694] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.588936] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.589605] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 
tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 874.589864] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 874.590139] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1926534f-cbe0-487d-80eb-44a446879017 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.593953] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5006e42-3c2b-4d04-8100-104fdb227ef6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.600655] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 874.600911] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58559a53-4c98-4c35-8a35-2cc4cb358306 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.603199] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.603414] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 874.604422] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba422655-f3c3-4787-852d-4278610af057 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.609367] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 874.609367] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]526aecac-6cce-3f35-85d6-c3f515dcbc6b" [ 874.609367] env[69027]: _type = "Task" [ 874.609367] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.616567] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]526aecac-6cce-3f35-85d6-c3f515dcbc6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.669557] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 874.669850] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 874.670058] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Deleting the datastore file [datastore2] ae5e2ca1-75e2-4023-b297-4cc265f038e5 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.670316] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77130464-23f0-4752-8604-dbb5a5b29540 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.676837] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Waiting for the task: (returnval){ [ 874.676837] env[69027]: value = "task-3395117" [ 874.676837] env[69027]: _type = "Task" [ 874.676837] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.685253] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Task: {'id': task-3395117, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.121834] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 875.122725] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating directory with path [datastore2] vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.122725] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e2aa44f-3206-4cd4-8acd-58771201a80e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.134949] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created directory with path [datastore2] vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.134949] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Fetch image to [datastore2] vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 875.134949] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 875.135185] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9514fb09-7dbe-4eca-a134-50a747981ea8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.142138] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b9f1a9-47ba-460e-ae63-cf674e2e3bb5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.152396] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a2b3be-06cc-4a19-aeb0-87951c53671d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.187031] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1be52860-01e9-4538-a378-0cd964bc34da {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.194705] env[69027]: DEBUG oslo_vmware.api [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Task: {'id': task-3395117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066078} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.196328] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 875.196548] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 875.196694] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 875.197123] env[69027]: INFO nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 875.200052] env[69027]: DEBUG nova.compute.claims [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 875.200052] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.200052] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.202741] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ca5a76d8-36d8-4016-9de1-6de628fda07d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.222413] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 875.352072] env[69027]: DEBUG oslo_vmware.rw_handles [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 875.416541] env[69027]: DEBUG oslo_vmware.rw_handles [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 875.416769] env[69027]: DEBUG oslo_vmware.rw_handles [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 875.761213] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd5aee4-d5f2-4dcc-9a59-8a5c35c54cee {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.771057] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5303f9-d5c0-4fe4-a8ee-12c51f0ee9ac {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.801600] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeddb9be-b6d8-4416-bd03-ed0a1d6fe0eb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.808966] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f98de0-9734-49c1-9222-5ebabe45b25a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.823089] env[69027]: DEBUG nova.compute.provider_tree [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.831721] env[69027]: DEBUG nova.scheduler.client.report [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 875.848703] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.649s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.849247] env[69027]: ERROR nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 875.849247] env[69027]: Faults: ['InvalidArgument'] [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Traceback (most recent call last): [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 
875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self.driver.spawn(context, instance, image_meta, [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self._fetch_image_if_missing(context, vi) [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] image_cache(vi, tmp_image_ds_loc) [ 875.849247] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] vm_util.copy_virtual_disk( [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] session._wait_for_task(vmdk_copy_task) [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] return self.wait_for_task(task_ref) [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] return evt.wait() [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] result = hub.switch() [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] return self.greenlet.switch() [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 875.849874] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] self.f(*self.args, **self.kw) [ 875.850450] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 875.850450] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] raise exceptions.translate_fault(task_info.error) [ 875.850450] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 875.850450] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Faults: ['InvalidArgument'] [ 875.850450] env[69027]: ERROR nova.compute.manager [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] [ 875.850450] env[69027]: DEBUG nova.compute.utils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 875.851419] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Build of instance ae5e2ca1-75e2-4023-b297-4cc265f038e5 was re-scheduled: A specified parameter was not correct: fileType [ 875.851419] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 875.851797] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 875.851981] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 875.852174] env[69027]: DEBUG nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 875.852344] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 876.402690] env[69027]: DEBUG nova.network.neutron [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.420175] env[69027]: INFO nova.compute.manager [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Took 0.57 seconds to deallocate network for instance. [ 876.540884] env[69027]: INFO nova.scheduler.client.report [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Deleted allocations for instance ae5e2ca1-75e2-4023-b297-4cc265f038e5 [ 876.567342] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb399e7a-b439-4a92-83e3-976117797563 tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 287.232s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.568603] env[69027]: DEBUG oslo_concurrency.lockutils [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 86.941s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.568828] env[69027]: DEBUG oslo_concurrency.lockutils [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Acquiring lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.569359] env[69027]: DEBUG oslo_concurrency.lockutils [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.569539] env[69027]: DEBUG oslo_concurrency.lockutils [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.571807] env[69027]: INFO nova.compute.manager [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Terminating instance [ 876.573690] env[69027]: DEBUG nova.compute.manager [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 876.573923] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 876.574452] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f9cf0bd-132a-43dd-a19c-beb87f9403fc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.580062] env[69027]: DEBUG nova.compute.manager [None req-34f42b96-9e7c-4dcd-a599-54b301eafc39 tempest-AttachInterfacesUnderV243Test-492310881 tempest-AttachInterfacesUnderV243Test-492310881-project-member] [instance: fbc37887-513b-4b67-915f-e6862ea585e6] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 876.586776] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00956dbb-1413-4278-982f-4f63adf282ed {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.617170] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae5e2ca1-75e2-4023-b297-4cc265f038e5 could not be found. 
[ 876.617170] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 876.617170] env[69027]: INFO nova.compute.manager [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 876.617170] env[69027]: DEBUG oslo.service.loopingcall [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.617569] env[69027]: DEBUG nova.compute.manager [None req-34f42b96-9e7c-4dcd-a599-54b301eafc39 tempest-AttachInterfacesUnderV243Test-492310881 tempest-AttachInterfacesUnderV243Test-492310881-project-member] [instance: fbc37887-513b-4b67-915f-e6862ea585e6] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 876.618210] env[69027]: DEBUG nova.compute.manager [-] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 876.618336] env[69027]: DEBUG nova.network.neutron [-] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 876.659800] env[69027]: DEBUG oslo_concurrency.lockutils [None req-34f42b96-9e7c-4dcd-a599-54b301eafc39 tempest-AttachInterfacesUnderV243Test-492310881 tempest-AttachInterfacesUnderV243Test-492310881-project-member] Lock "fbc37887-513b-4b67-915f-e6862ea585e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.790s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.669060] env[69027]: DEBUG nova.compute.manager [None req-aebe1c2a-11fc-4553-babd-236a00a537ff tempest-ServerDiagnosticsNegativeTest-1687517774 tempest-ServerDiagnosticsNegativeTest-1687517774-project-member] [instance: c42ce6eb-a29d-4e16-b5e6-ee507bd58819] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 876.671795] env[69027]: DEBUG nova.network.neutron [-] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.694411] env[69027]: DEBUG nova.compute.manager [None req-aebe1c2a-11fc-4553-babd-236a00a537ff tempest-ServerDiagnosticsNegativeTest-1687517774 tempest-ServerDiagnosticsNegativeTest-1687517774-project-member] [instance: c42ce6eb-a29d-4e16-b5e6-ee507bd58819] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 876.700485] env[69027]: INFO nova.compute.manager [-] [instance: ae5e2ca1-75e2-4023-b297-4cc265f038e5] Took 0.08 seconds to deallocate network for instance. [ 876.718648] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aebe1c2a-11fc-4553-babd-236a00a537ff tempest-ServerDiagnosticsNegativeTest-1687517774 tempest-ServerDiagnosticsNegativeTest-1687517774-project-member] Lock "c42ce6eb-a29d-4e16-b5e6-ee507bd58819" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.291s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.728405] env[69027]: DEBUG nova.compute.manager [None req-c7f503b2-bba0-4290-aab2-dab356bbd5e2 tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] [instance: 59c731c3-8604-481b-b761-29a5251411f2] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 876.755824] env[69027]: DEBUG nova.compute.manager [None req-c7f503b2-bba0-4290-aab2-dab356bbd5e2 tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] [instance: 59c731c3-8604-481b-b761-29a5251411f2] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 876.780703] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c7f503b2-bba0-4290-aab2-dab356bbd5e2 tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Lock "59c731c3-8604-481b-b761-29a5251411f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.288s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.791014] env[69027]: DEBUG nova.compute.manager [None req-977ecc39-201b-4b24-a71f-65988694224c tempest-ServersTestBootFromVolume-666257734 tempest-ServersTestBootFromVolume-666257734-project-member] [instance: caee2e65-98e5-424d-8dd8-057732b921fc] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 876.817099] env[69027]: DEBUG oslo_concurrency.lockutils [None req-012dd3b8-6004-4593-88a4-63e76fd1af2e tempest-ServersAdminNegativeTestJSON-971328359 tempest-ServersAdminNegativeTestJSON-971328359-project-member] Lock "ae5e2ca1-75e2-4023-b297-4cc265f038e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.248s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.819392] env[69027]: DEBUG nova.compute.manager [None req-977ecc39-201b-4b24-a71f-65988694224c tempest-ServersTestBootFromVolume-666257734 tempest-ServersTestBootFromVolume-666257734-project-member] [instance: caee2e65-98e5-424d-8dd8-057732b921fc] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 876.848160] env[69027]: DEBUG oslo_concurrency.lockutils [None req-977ecc39-201b-4b24-a71f-65988694224c tempest-ServersTestBootFromVolume-666257734 tempest-ServersTestBootFromVolume-666257734-project-member] Lock "caee2e65-98e5-424d-8dd8-057732b921fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.056s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.859103] env[69027]: DEBUG nova.compute.manager [None req-0f498c2f-adc7-4a9c-b8df-3d6e432363a9 tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 695f7335-8e1d-4d7f-a377-8666b12e30f3] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 876.884297] env[69027]: DEBUG nova.compute.manager [None req-0f498c2f-adc7-4a9c-b8df-3d6e432363a9 tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] [instance: 695f7335-8e1d-4d7f-a377-8666b12e30f3] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 876.907822] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0f498c2f-adc7-4a9c-b8df-3d6e432363a9 tempest-DeleteServersAdminTestJSON-127646209 tempest-DeleteServersAdminTestJSON-127646209-project-member] Lock "695f7335-8e1d-4d7f-a377-8666b12e30f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.428s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.920314] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 876.983929] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 876.984287] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 876.986271] env[69027]: INFO nova.compute.claims [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.474405] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9d342b-79cd-43ae-84f2-80c0d313a7f7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.482465] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54012de9-d341-4f00-be99-b711d59180f2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.519995] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31441091-3743-49e1-90f8-e57b58ae46f3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.528206] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f763fb-d9c9-425f-9404-98305806550d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.542086] env[69027]: DEBUG nova.compute.provider_tree [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.551501] env[69027]: DEBUG nova.scheduler.client.report [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 877.570272] env[69027]: DEBUG oslo_concurrency.lockutils 
[None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.586s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.570781] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 877.607138] env[69027]: DEBUG nova.compute.utils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 877.608666] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 877.608852] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 877.626743] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 877.732358] env[69027]: DEBUG nova.policy [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a927086c41314a3a90bd1effeee4399a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b70232b938a4ad0b9229d415aa9b8bd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 877.747673] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 877.775015] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 877.775288] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 877.775446] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.775629] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 877.775780] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.775927] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 877.776868] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 877.777093] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 877.777290] 
env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 877.777465] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 877.777647] env[69027]: DEBUG nova.virt.hardware [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 877.778520] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dcd937-d6b5-48d7-a2fd-48763c97b1ad {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.787391] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e70da0-5c42-4350-93eb-ff0a06d9d58b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.349243] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Successfully created port: f260d000-f668-4224-933d-6313f6de1b5a {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 879.552543] env[69027]: DEBUG nova.compute.manager [req-41118b65-fb40-44e2-8d00-4c0964637572 req-dcf8bf5a-0b1b-4a50-b6b5-2618360fc35e service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Received event network-vif-plugged-f260d000-f668-4224-933d-6313f6de1b5a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 879.552834] env[69027]: DEBUG oslo_concurrency.lockutils [req-41118b65-fb40-44e2-8d00-4c0964637572 req-dcf8bf5a-0b1b-4a50-b6b5-2618360fc35e service nova] Acquiring lock "4ed95b65-233e-406e-8d27-2a5cd2694184-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.553012] env[69027]: DEBUG oslo_concurrency.lockutils [req-41118b65-fb40-44e2-8d00-4c0964637572 req-dcf8bf5a-0b1b-4a50-b6b5-2618360fc35e service nova] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.553202] env[69027]: DEBUG oslo_concurrency.lockutils [req-41118b65-fb40-44e2-8d00-4c0964637572 req-dcf8bf5a-0b1b-4a50-b6b5-2618360fc35e service nova] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 879.553472] 
env[69027]: DEBUG nova.compute.manager [req-41118b65-fb40-44e2-8d00-4c0964637572 req-dcf8bf5a-0b1b-4a50-b6b5-2618360fc35e service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] No waiting events found dispatching network-vif-plugged-f260d000-f668-4224-933d-6313f6de1b5a {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 879.553650] env[69027]: WARNING nova.compute.manager [req-41118b65-fb40-44e2-8d00-4c0964637572 req-dcf8bf5a-0b1b-4a50-b6b5-2618360fc35e service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Received unexpected event network-vif-plugged-f260d000-f668-4224-933d-6313f6de1b5a for instance with vm_state building and task_state spawning. [ 879.564969] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Successfully updated port: f260d000-f668-4224-933d-6313f6de1b5a {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.582785] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "refresh_cache-4ed95b65-233e-406e-8d27-2a5cd2694184" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.583043] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired lock "refresh_cache-4ed95b65-233e-406e-8d27-2a5cd2694184" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.583223] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.663186] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 879.940032] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Updating instance_info_cache with network_info: [{"id": "f260d000-f668-4224-933d-6313f6de1b5a", "address": "fa:16:3e:5a:7f:81", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf260d000-f6", "ovs_interfaceid": "f260d000-f668-4224-933d-6313f6de1b5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.964780] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Releasing lock "refresh_cache-4ed95b65-233e-406e-8d27-2a5cd2694184" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.964780] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Instance network_info: |[{"id": "f260d000-f668-4224-933d-6313f6de1b5a", "address": "fa:16:3e:5a:7f:81", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf260d000-f6", "ovs_interfaceid": "f260d000-f668-4224-933d-6313f6de1b5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 879.965101] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:7f:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f260d000-f668-4224-933d-6313f6de1b5a', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 879.974966] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating folder: Project (2b70232b938a4ad0b9229d415aa9b8bd). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 879.978274] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98ec05fe-75ad-4837-8d21-8d7c888fd512 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.987377] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Created folder: Project (2b70232b938a4ad0b9229d415aa9b8bd) in parent group-v677321. [ 879.987561] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating folder: Instances. Parent ref: group-v677365. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 879.988058] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f9546b9-99c3-4bb3-81f7-d7e6bc550616 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.999345] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Created folder: Instances in parent group-v677365. [ 879.999619] env[69027]: DEBUG oslo.service.loopingcall [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 879.999866] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 880.000146] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f815151c-2534-416b-aa3d-623e7c2985b4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.025019] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.025019] env[69027]: value = "task-3395120" [ 880.025019] env[69027]: _type = "Task" [ 880.025019] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.031519] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395120, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.534334] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395120, 'name': CreateVM_Task, 'duration_secs': 0.306975} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.534696] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 880.535808] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 880.536150] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.536913] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 880.539062] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2a03a4f-7e5c-4c35-8b3f-bd07f76b861a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.541914] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 880.541914] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52702538-c4f2-061a-ea74-ebdaa8c1dd5b" [ 880.541914] env[69027]: _type = "Task" [ 880.541914] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.552894] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52702538-c4f2-061a-ea74-ebdaa8c1dd5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.054715] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.055034] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.055263] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.740010] env[69027]: DEBUG nova.compute.manager [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Received event network-changed-f260d000-f668-4224-933d-6313f6de1b5a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 881.740296] env[69027]: DEBUG nova.compute.manager [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Refreshing instance network info cache due to event network-changed-f260d000-f668-4224-933d-6313f6de1b5a. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 881.740442] env[69027]: DEBUG oslo_concurrency.lockutils [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] Acquiring lock "refresh_cache-4ed95b65-233e-406e-8d27-2a5cd2694184" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.740585] env[69027]: DEBUG oslo_concurrency.lockutils [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] Acquired lock "refresh_cache-4ed95b65-233e-406e-8d27-2a5cd2694184" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.740785] env[69027]: DEBUG nova.network.neutron [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Refreshing network info cache for port f260d000-f668-4224-933d-6313f6de1b5a {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 882.122720] env[69027]: DEBUG nova.network.neutron [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Updated VIF entry in instance network info cache for port f260d000-f668-4224-933d-6313f6de1b5a. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 882.122720] env[69027]: DEBUG nova.network.neutron [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Updating instance_info_cache with network_info: [{"id": "f260d000-f668-4224-933d-6313f6de1b5a", "address": "fa:16:3e:5a:7f:81", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.102", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf260d000-f6", "ovs_interfaceid": "f260d000-f668-4224-933d-6313f6de1b5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.136268] env[69027]: DEBUG oslo_concurrency.lockutils [req-d4d9fb81-b8ca-48db-ad7b-f81ebbe3d09b req-4dc9fbd5-6c12-48a8-baa3-ad1a01ab3688 service nova] Releasing lock "refresh_cache-4ed95b65-233e-406e-8d27-2a5cd2694184" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.971802] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "bf4c80b4-bc0c-4198-9010-74fc50707745" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.972134] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.690814] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c6136b5c-beae-41ce-bb21-008b9561b9c8 tempest-ImagesOneServerNegativeTestJSON-1086940354 tempest-ImagesOneServerNegativeTestJSON-1086940354-project-member] Acquiring lock "84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.691162] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c6136b5c-beae-41ce-bb21-008b9561b9c8 tempest-ImagesOneServerNegativeTestJSON-1086940354 tempest-ImagesOneServerNegativeTestJSON-1086940354-project-member] Lock 
"84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.766596] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 891.770759] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 891.770997] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 891.771144] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 891.794273] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.794537] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.794704] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.794835] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.794961] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.795097] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.795218] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.795339] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.795459] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.795577] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 891.795695] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 891.796235] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 891.806431] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.806637] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.806804] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.806965] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 891.808045] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb47051-eb3c-45aa-bb77-2fa7dab58ef4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.817218] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f15a7b-528b-48bc-9ee0-8d142aca1715 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.831446] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f637792a-9196-4f87-9e55-2d08da808ec8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.837619] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f00ecf8-11b3-4d0e-b929-61ca0e0b63eb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.867403] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180997MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 891.867545] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.867739] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.939531] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ff1dac6-b328-42c3-babe-86aef27466c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.939711] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance aec054f1-0d52-49be-9dee-8db0ae362f12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.939933] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.940077] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.940198] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.940315] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.940428] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.940540] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.940651] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.940776] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.954369] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.963680] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.974047] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 405e3683-a1c1-4452-91df-4e52ebf25b65 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.983843] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c49c78cd-90f2-4157-8938-88492ae533ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 891.994050] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.004646] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f86ab9df-e4a8-4515-81ec-a494446efa4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.015386] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 12e2bac8-06ec-43ab-bb9d-9331789aaf10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.024812] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.035765] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 40b03034-9a4d-4c60-9847-9e24963b0d0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.046870] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b2a14f3e-3920-4c24-96bc-e11cffc4ad57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.056837] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ac96fbcc-59d8-4625-a705-14410e0beec3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.067258] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 8c97744d-c2ff-477a-a973-d90d6b526559 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.078283] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6aa59a08-032a-4de9-8fef-cef1b176a046 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.088646] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9cd6b8ee-27a8-4535-9550-29dd51fca73c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.098670] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ab15458-940f-490d-9aae-858f9f928a80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.110104] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ed1fc438-2682-405c-94f3-42b8db784c47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.119311] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f6a3e604-e5be-4633-91b0-d8790cc4b810 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.130717] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 68bd33f5-18fb-4ab5-8b23-98c2a94ec36a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.140208] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 74bb614d-40ee-4e2b-8085-8351f85fe1ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.149349] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 51ec5aa4-027a-4f24-acac-1b6933e679de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.159059] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.168411] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.168647] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 892.168792] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 892.506040] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc11b88e-fdbe-4e3e-bc09-6e3a24eeb604 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.513630] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d879e075-e3f4-4331-a7b0-7e7e4b3a3a5c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.545671] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ba39e1-feb5-4eec-b2b3-f8ac4855f499 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.552684] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b4c3ff-6664-4e3a-b0c4-4882b031705c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.565283] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.573587] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 892.587919] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 892.588128] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.720s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.563390] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.585841] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.586082] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.770899] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.771171] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.771327] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 893.771474] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 895.771594] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.334038] env[69027]: WARNING oslo_vmware.rw_handles [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 924.334038] env[69027]: ERROR oslo_vmware.rw_handles [ 924.338418] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 924.338418] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 924.338418] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Copying Virtual Disk [datastore2] vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/254cf9c0-f6f1-4a27-bd01-fcfe6f7f42da/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 924.338418] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76ad406a-c69d-4bff-b72f-bd1b3e04b50e {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.346339] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 924.346339] env[69027]: value = "task-3395121" [ 924.346339] env[69027]: _type = "Task" [ 924.346339] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.355806] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.856858] env[69027]: DEBUG oslo_vmware.exceptions [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 924.857188] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.857787] env[69027]: ERROR nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 924.857787] env[69027]: Faults: ['InvalidArgument'] [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Traceback (most recent call last): [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] yield resources [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] self.driver.spawn(context, instance, image_meta, [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] 
self._fetch_image_if_missing(context, vi) [ 924.857787] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] image_cache(vi, tmp_image_ds_loc) [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] vm_util.copy_virtual_disk( [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] session._wait_for_task(vmdk_copy_task) [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] return self.wait_for_task(task_ref) [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] return evt.wait() [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] result = hub.switch() [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 924.858134] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] return self.greenlet.switch() [ 924.858502] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 924.858502] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] self.f(*self.args, **self.kw) [ 924.858502] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 924.858502] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] raise exceptions.translate_fault(task_info.error) [ 924.858502] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 924.858502] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Faults: ['InvalidArgument'] [ 924.858502] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] [ 924.858502] env[69027]: INFO nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 
tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Terminating instance [ 924.859790] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.860049] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.860301] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f3ace7f-a5f8-46ec-b78e-e3a05407b499 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.862606] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 924.862797] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 924.863512] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c403b74e-c2cd-40db-961e-c5426b361550 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.869837] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 924.870098] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-989d5976-eb2e-4fff-87ea-716617bd01cd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.872157] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.872339] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 924.873249] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c449c28-5efd-46de-9c4f-c996680d9633 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.877972] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Waiting for the task: (returnval){ [ 924.877972] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52a0d58c-33ae-f17f-3a64-e5786a4bfe9c" [ 924.877972] env[69027]: _type = "Task" [ 924.877972] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.887694] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52a0d58c-33ae-f17f-3a64-e5786a4bfe9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.936856] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 924.937095] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 924.937283] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleting the datastore file [datastore2] 9ff1dac6-b328-42c3-babe-86aef27466c7 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.937556] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a60062be-9c30-4594-b5d1-e6dc29809669 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.943883] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 924.943883] env[69027]: value = "task-3395123" [ 924.943883] env[69027]: _type = "Task" [ 924.943883] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.951454] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395123, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.388356] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 925.388619] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Creating directory with path [datastore2] vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.388831] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77bf8e75-d07d-4e40-81f7-8e936e71b5fa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.400358] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Created directory with path [datastore2] vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.400544] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Fetch image to [datastore2] vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 925.400815] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 925.401518] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35983a53-f12f-46b3-845f-f78d55f88577 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.408035] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c72d5d-2d65-443c-b300-e4df52d2b588 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.416931] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7b6813-ec75-4bb9-95ea-74035e1a1c36 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.450621] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d24017-91c9-4a62-a875-f0c3538125f2 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.457510] env[69027]: DEBUG oslo_vmware.api [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070246} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.458991] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.459195] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 925.459372] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 925.459543] env[69027]: INFO nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 925.461585] env[69027]: DEBUG nova.compute.claims [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 925.461755] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.461977] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.464719] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f53ef2ad-032c-4092-9844-43d8e5f336a2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.487250] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 925.543937] env[69027]: DEBUG oslo_vmware.rw_handles [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 925.606928] env[69027]: DEBUG oslo_vmware.rw_handles [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 925.607097] env[69027]: DEBUG oslo_vmware.rw_handles [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 925.946859] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f013266d-1aa8-4cdf-9115-44c052e37c31 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.954315] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e21e1c-d902-41fa-92be-17cb23a0b151 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.984253] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4da900-22f0-4e59-ace4-f2ff306448dc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.991567] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51edb18-2e94-4461-9c53-2d913e28c883 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.004530] env[69027]: DEBUG nova.compute.provider_tree [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.015046] env[69027]: DEBUG nova.scheduler.client.report [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 926.028639] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.567s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.029312] env[69027]: ERROR nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 926.029312] env[69027]: Faults: ['InvalidArgument'] [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Traceback (most recent call last): [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 926.029312] env[69027]: 
ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] self.driver.spawn(context, instance, image_meta, [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] self._fetch_image_if_missing(context, vi) [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] image_cache(vi, tmp_image_ds_loc) [ 926.029312] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] vm_util.copy_virtual_disk( [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] session._wait_for_task(vmdk_copy_task) [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] return self.wait_for_task(task_ref) [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] return evt.wait() [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] result = hub.switch() [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] return self.greenlet.switch() [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 926.029653] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] self.f(*self.args, **self.kw) [ 926.029947] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 926.029947] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] raise exceptions.translate_fault(task_info.error) [ 926.029947] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 926.029947] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Faults: ['InvalidArgument'] [ 926.029947] env[69027]: ERROR nova.compute.manager [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] [ 926.030130] env[69027]: DEBUG nova.compute.utils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 926.031548] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Build of instance 9ff1dac6-b328-42c3-babe-86aef27466c7 was re-scheduled: A specified parameter was not correct: fileType [ 926.031548] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 926.031927] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 926.032119] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 926.032296] env[69027]: DEBUG nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 926.032493] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 926.365978] env[69027]: DEBUG nova.network.neutron [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.384024] env[69027]: INFO nova.compute.manager [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 9ff1dac6-b328-42c3-babe-86aef27466c7] Took 0.35 seconds to deallocate network for instance. [ 926.484576] env[69027]: INFO nova.scheduler.client.report [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleted allocations for instance 9ff1dac6-b328-42c3-babe-86aef27466c7 [ 926.507628] env[69027]: DEBUG oslo_concurrency.lockutils [None req-036b336a-d54b-4c38-acd1-c826199f5467 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "9ff1dac6-b328-42c3-babe-86aef27466c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 337.114s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.524768] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 926.571221] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.571483] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.573291] env[69027]: INFO nova.compute.claims [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.991704] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c05f884-5db2-4174-8242-d6d32b53ac94 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.999750] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae178e26-efad-4a8c-8a41-a13cb344612d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.029913] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9429bcf6-4b4e-41d4-9589-4117057bf76e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.036174] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcb7dfd-b1e8-4cd3-9050-35c72c7460d7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.050561] env[69027]: DEBUG nova.compute.provider_tree [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.058987] env[69027]: DEBUG nova.scheduler.client.report [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 927.074522] env[69027]: DEBUG oslo_concurrency.lockutils 
[None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.503s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.074996] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 927.113049] env[69027]: DEBUG nova.compute.utils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 927.114139] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 927.114316] env[69027]: DEBUG nova.network.neutron [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 927.123874] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 927.184689] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 927.187697] env[69027]: DEBUG nova.policy [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a927086c41314a3a90bd1effeee4399a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b70232b938a4ad0b9229d415aa9b8bd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 927.213694] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 927.213936] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 927.214170] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 927.214378] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 927.214526] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 927.214673] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 927.214881] 
env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 927.215056] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 927.215231] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 927.215398] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 927.215570] env[69027]: DEBUG nova.virt.hardware [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 927.216421] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1232ad90-2a9b-4815-9a0b-0d27f75f1cda {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.224587] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99aa2817-8278-416c-92cd-5ec040e88747 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.586649] env[69027]: DEBUG nova.network.neutron [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Successfully created port: c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 928.347113] env[69027]: DEBUG nova.network.neutron [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Successfully updated port: c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.360476] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "refresh_cache-fbd6a238-1662-4c22-86ab-d31d4bb82734" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.360476] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired lock "refresh_cache-fbd6a238-1662-4c22-86ab-d31d4bb82734" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.360476] env[69027]: DEBUG nova.network.neutron [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 928.405754] env[69027]: DEBUG nova.network.neutron [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.531309] env[69027]: DEBUG nova.compute.manager [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Received event network-vif-plugged-c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 928.531529] env[69027]: DEBUG oslo_concurrency.lockutils [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] Acquiring lock "fbd6a238-1662-4c22-86ab-d31d4bb82734-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.531734] env[69027]: DEBUG oslo_concurrency.lockutils [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.531988] env[69027]: DEBUG oslo_concurrency.lockutils [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.532092] env[69027]: DEBUG nova.compute.manager [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] No waiting events found dispatching network-vif-plugged-c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 928.532233] env[69027]: WARNING nova.compute.manager [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Received unexpected event network-vif-plugged-c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9 for instance with vm_state building and task_state spawning. 
[ 928.532392] env[69027]: DEBUG nova.compute.manager [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Received event network-changed-c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 928.532659] env[69027]: DEBUG nova.compute.manager [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Refreshing instance network info cache due to event network-changed-c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 928.532870] env[69027]: DEBUG oslo_concurrency.lockutils [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] Acquiring lock "refresh_cache-fbd6a238-1662-4c22-86ab-d31d4bb82734" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.646413] env[69027]: DEBUG nova.network.neutron [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Updating instance_info_cache with network_info: [{"id": "c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9", "address": "fa:16:3e:b2:f4:8c", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7ff6a18-3a", "ovs_interfaceid": "c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.662974] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Releasing lock "refresh_cache-fbd6a238-1662-4c22-86ab-d31d4bb82734" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.663319] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Instance network_info: |[{"id": "c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9", "address": "fa:16:3e:b2:f4:8c", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7ff6a18-3a", "ovs_interfaceid": "c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 928.663621] env[69027]: DEBUG oslo_concurrency.lockutils [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] Acquired lock "refresh_cache-fbd6a238-1662-4c22-86ab-d31d4bb82734" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.663803] env[69027]: DEBUG nova.network.neutron [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Refreshing network info cache for port c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 928.665552] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:f4:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 928.673440] env[69027]: DEBUG oslo.service.loopingcall [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 928.674422] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 928.676838] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88fce5d2-5285-4f56-be7f-d928d5ce6f4a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.697290] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 928.697290] env[69027]: value = "task-3395124" [ 928.697290] env[69027]: _type = "Task" [ 928.697290] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.707879] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395124, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.208987] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395124, 'name': CreateVM_Task, 'duration_secs': 0.267701} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.209257] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 929.210237] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.210419] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.210722] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 929.210958] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb9ca78d-f0fe-4207-881c-7c290b7b72e5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.215540] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 929.215540] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5289197b-040d-667f-533c-047e5e3c885f" [ 929.215540] env[69027]: _type = "Task" [ 929.215540] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.227029] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5289197b-040d-667f-533c-047e5e3c885f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.233362] env[69027]: DEBUG nova.network.neutron [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Updated VIF entry in instance network info cache for port c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 929.233685] env[69027]: DEBUG nova.network.neutron [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Updating instance_info_cache with network_info: [{"id": "c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9", "address": "fa:16:3e:b2:f4:8c", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7ff6a18-3a", "ovs_interfaceid": "c7ff6a18-3a9d-4ce3-bdb7-c13b22b757a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.245017] env[69027]: DEBUG oslo_concurrency.lockutils [req-a33f1b17-2923-4e04-b472-a9dd94377fc0 req-7b35c7c4-1d90-4dbf-9975-459c1c7cc036 service nova] Releasing lock "refresh_cache-fbd6a238-1662-4c22-86ab-d31d4bb82734" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.725837] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.726259] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 929.726348] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.438073] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.766584] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 951.771404] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 951.771664] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 951.771730] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 951.795418] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.795886] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.796173] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.796429] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.796680] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.796946] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.798109] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.798109] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.798109] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.798109] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 951.798109] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 951.798357] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 951.810445] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.810707] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.810884] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.811097] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 951.812207] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505a3978-c900-4079-8809-00176de4f9fa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.821366] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2768e719-288a-4477-b3ca-401b05d739f6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.836578] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081832c1-e01c-41f5-af0b-2e97a9cab98f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.842988] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dceec49a-c67f-4b04-b722-cc981f958bd0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.872617] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180965MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 951.872768] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.872964] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.947232] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance aec054f1-0d52-49be-9dee-8db0ae362f12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.947400] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.947526] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.947646] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.947763] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.947916] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.948046] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.948167] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.948307] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.948424] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 951.960599] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 951.971339] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 405e3683-a1c1-4452-91df-4e52ebf25b65 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 951.982403] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c49c78cd-90f2-4157-8938-88492ae533ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 951.992621] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.002450] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f86ab9df-e4a8-4515-81ec-a494446efa4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.012246] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 12e2bac8-06ec-43ab-bb9d-9331789aaf10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.021606] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.031693] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 40b03034-9a4d-4c60-9847-9e24963b0d0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.040584] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b2a14f3e-3920-4c24-96bc-e11cffc4ad57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.049325] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ac96fbcc-59d8-4625-a705-14410e0beec3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.059568] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 8c97744d-c2ff-477a-a973-d90d6b526559 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.068832] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6aa59a08-032a-4de9-8fef-cef1b176a046 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.078623] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9cd6b8ee-27a8-4535-9550-29dd51fca73c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.087611] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ab15458-940f-490d-9aae-858f9f928a80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.098471] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ed1fc438-2682-405c-94f3-42b8db784c47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.108036] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f6a3e604-e5be-4633-91b0-d8790cc4b810 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.117970] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 68bd33f5-18fb-4ab5-8b23-98c2a94ec36a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.127535] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 74bb614d-40ee-4e2b-8085-8351f85fe1ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.136971] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 51ec5aa4-027a-4f24-acac-1b6933e679de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.146261] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.156334] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.156573] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 952.156727] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 952.498795] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801d6623-46bb-4a5a-a2ca-f449af52a9c3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.507482] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373aa1aa-a656-4a85-96cc-242846eaed9e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.536498] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc87044f-b376-4966-9edb-26f82b7e4106 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.543246] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f89ee8-7910-4882-a77e-5b050b2d0083 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.555666] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.563975] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 952.578644] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 952.578837] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.706s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.551688] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.551988] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.771653] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.771878] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.772050] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 955.773798] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.771157] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.353309] env[69027]: WARNING oslo_vmware.rw_handles [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 972.353309] env[69027]: ERROR oslo_vmware.rw_handles [ 972.353895] env[69027]: DEBUG nova.virt.vmwareapi.images [None 
req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 972.356125] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 972.356406] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Copying Virtual Disk [datastore2] vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/c909f9c7-5fb5-4413-a971-d0da835be3f5/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 972.356718] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-322d7202-adec-4450-91a7-f9e189e481c5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.365140] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Waiting for the task: (returnval){ [ 972.365140] env[69027]: value = "task-3395125" [ 972.365140] env[69027]: _type = "Task" [ 972.365140] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.373343] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Task: {'id': task-3395125, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.876153] env[69027]: DEBUG oslo_vmware.exceptions [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 972.876153] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.877079] env[69027]: ERROR nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 972.877079] env[69027]: Faults: ['InvalidArgument'] [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Traceback (most recent call last): [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] yield resources [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self.driver.spawn(context, instance, image_meta, [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self._fetch_image_if_missing(context, vi) [ 972.877079] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] image_cache(vi, tmp_image_ds_loc) [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] vm_util.copy_virtual_disk( [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] session._wait_for_task(vmdk_copy_task) [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] return self.wait_for_task(task_ref) [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] return evt.wait() [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] result = hub.switch() [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 972.877481] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] return self.greenlet.switch() [ 972.877829] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 972.877829] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self.f(*self.args, **self.kw) [ 972.877829] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 972.877829] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] raise exceptions.translate_fault(task_info.error) [ 972.877829] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 972.877829] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Faults: ['InvalidArgument'] [ 972.877829] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] [ 972.877829] env[69027]: INFO nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Terminating instance [ 972.878833] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.878833] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.879436] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] 
Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 972.879619] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 972.879855] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ceddfb5b-7e87-470f-8067-27e5c0793778 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.882309] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cb14d5-816a-49a0-b2e9-723aaf92fdb6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.890579] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 972.890819] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79a49819-d56c-42ed-a3de-a69f5e70e463 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.893115] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.893291] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 972.894230] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eca772c-5036-4388-9cce-efd4298239cd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.898904] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for the task: (returnval){ [ 972.898904] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5272b790-0c2d-7491-a27f-08af504e10eb" [ 972.898904] env[69027]: _type = "Task" [ 972.898904] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.906047] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5272b790-0c2d-7491-a27f-08af504e10eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.961272] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 972.961520] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 972.961701] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Deleting the datastore file [datastore2] aec054f1-0d52-49be-9dee-8db0ae362f12 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.961981] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdd17aa8-dd7a-4f9a-884d-947892ee7a33 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.969408] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Waiting for the task: (returnval){ [ 972.969408] env[69027]: value = "task-3395127" [ 972.969408] env[69027]: _type = "Task" [ 972.969408] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.977183] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Task: {'id': task-3395127, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.409803] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 973.410099] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Creating directory with path [datastore2] vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.410099] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e45ec92-49df-4284-902b-7e2d42dc2832 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.422188] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Created directory with path [datastore2] vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.422395] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Fetch image to [datastore2] vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 973.422575] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 973.423339] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba61360d-62a7-4125-aaa0-efa363851ac2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.430041] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a7291f-602c-400e-969c-833d9160a8e9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.439288] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb2b304-f16f-4a3d-9067-28227763d06e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.470038] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb477e12-3d5d-4b97-8d69-dea8588d0205 {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.480548] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-49b78ebb-96e5-4bf4-b8fa-e000e1337ab0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.482257] env[69027]: DEBUG oslo_vmware.api [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Task: {'id': task-3395127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076892} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.482434] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.482617] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 973.482789] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 973.482965] env[69027]: INFO nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Took 0.60 seconds to destroy the instance on the hypervisor. 
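The entries above follow a recurring shape: a VMware task (here CopyVirtualDisk_Task and DeleteDatastoreFile_Task) is submitted, then polled until it reaches a terminal state, and a task-level error is re-raised as a fault exception such as the "A specified parameter was not correct: fileType" VimFaultException. The sketch below is only an illustration of that polling pattern under assumed names (FakeTask, poll_task, VimFaultError are hypothetical and are not Nova or oslo.vmware APIs); it uses just the standard library.

# Illustrative sketch of the poll-until-terminal-state pattern seen in the log.
# FakeTask, poll_task and VimFaultError are hypothetical stand-ins, not real APIs.
import time


class VimFaultError(Exception):
    """Stand-in for a translated task fault (cf. VimFaultException above)."""
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list


class FakeTask:
    """Toy task that reports queued -> running -> error, like the copy task above."""
    def __init__(self):
        self._states = iter(["queued", "running", "error"])

    def info(self):
        state = next(self._states)
        error = None
        if state == "error":
            error = {"msg": "A specified parameter was not correct: fileType",
                     "faults": ["InvalidArgument"]}
        return {"state": state, "error": error}


def poll_task(task, interval=0.5):
    """Poll until the task finishes; raise a translated fault if it errored."""
    while True:
        info = task.info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise VimFaultError(info["error"]["msg"], info["error"]["faults"])
        time.sleep(interval)


if __name__ == "__main__":
    try:
        poll_task(FakeTask(), interval=0.01)
    except VimFaultError as exc:
        print(f"task failed: {exc} (faults: {exc.fault_list})")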
[ 973.485038] env[69027]: DEBUG nova.compute.claims [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 973.486026] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.486026] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.505354] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 973.566971] env[69027]: DEBUG oslo_vmware.rw_handles [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 973.627994] env[69027]: DEBUG oslo_vmware.rw_handles [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 973.628213] env[69027]: DEBUG oslo_vmware.rw_handles [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 973.947668] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56108eb1-ed80-4bb7-bb97-ac746da33006 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.956815] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72156edb-56a3-4225-ac5c-59e6c3a64272 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.988826] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06738830-51ae-4202-a42b-7ccea703a073 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.996666] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75e246b-60c9-421e-b4db-c9071176ef60 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.009629] env[69027]: DEBUG nova.compute.provider_tree [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.022273] env[69027]: DEBUG nova.scheduler.client.report [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 974.035217] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.550s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.035779] env[69027]: ERROR nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 974.035779] env[69027]: Faults: ['InvalidArgument'] [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Traceback (most recent call last): [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 974.035779] env[69027]: ERROR 
nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self.driver.spawn(context, instance, image_meta, [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self._fetch_image_if_missing(context, vi) [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] image_cache(vi, tmp_image_ds_loc) [ 974.035779] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] vm_util.copy_virtual_disk( [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] session._wait_for_task(vmdk_copy_task) [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] return self.wait_for_task(task_ref) [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] return evt.wait() [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] result = hub.switch() [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] return self.greenlet.switch() [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 974.036150] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] self.f(*self.args, **self.kw) [ 974.036664] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 974.036664] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] raise exceptions.translate_fault(task_info.error) [ 974.036664] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 974.036664] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Faults: ['InvalidArgument'] [ 974.036664] env[69027]: ERROR nova.compute.manager [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] [ 974.036664] env[69027]: DEBUG nova.compute.utils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 974.038174] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Build of instance aec054f1-0d52-49be-9dee-8db0ae362f12 was re-scheduled: A specified parameter was not correct: fileType [ 974.038174] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 974.038631] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 974.038807] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 974.038980] env[69027]: DEBUG nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 974.039160] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 974.376025] env[69027]: DEBUG nova.network.neutron [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.386265] env[69027]: INFO nova.compute.manager [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Took 0.35 seconds to deallocate network for instance. [ 974.484184] env[69027]: INFO nova.scheduler.client.report [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Deleted allocations for instance aec054f1-0d52-49be-9dee-8db0ae362f12 [ 974.512013] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e0b4a29e-df64-4529-b910-50cf9032390d tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 384.151s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.512900] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 183.041s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.513134] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Acquiring lock "aec054f1-0d52-49be-9dee-8db0ae362f12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.513343] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.513515] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.515672] env[69027]: INFO nova.compute.manager [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Terminating instance [ 974.517488] env[69027]: DEBUG nova.compute.manager [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 974.517690] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 974.518186] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1d41e3f-5633-4dad-b489-53138c8d19c3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.527822] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71301e3b-1e82-4a35-8d93-8f283c4b7e4f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.540372] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 974.560674] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aec054f1-0d52-49be-9dee-8db0ae362f12 could not be found. [ 974.560874] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 974.561063] env[69027]: INFO nova.compute.manager [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 974.561360] env[69027]: DEBUG oslo.service.loopingcall [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.561553] env[69027]: DEBUG nova.compute.manager [-] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 974.563041] env[69027]: DEBUG nova.network.neutron [-] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 974.584815] env[69027]: DEBUG nova.network.neutron [-] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.591704] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.591893] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.593363] env[69027]: INFO nova.compute.claims [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.596499] env[69027]: INFO nova.compute.manager [-] [instance: aec054f1-0d52-49be-9dee-8db0ae362f12] Took 0.03 seconds to deallocate network for instance. 
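The lockutils records around the claim abort and the new instance claim report how long each caller waited for a named lock and how long it held it ("acquired ... waited 0.000s", "released ... held 0.504s"). The snippet below is a minimal, assumed re-creation of that timing pattern with a process-local registry of named locks; timed_lock and the registry are hypothetical helpers, not the lockutils implementation.

# Illustrative sketch of the named-lock wait/held accounting seen in the log.
# timed_lock and the lock registry are hypothetical, standard-library-only helpers.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    # One process-local lock object per name.
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, caller):
    lock = _get_lock(name)
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.05)  # stand-in for claiming resources on the node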
[ 974.688181] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ccea47b5-6ba1-4d9a-a08d-f27b1f403c45 tempest-ServerPasswordTestJSON-197125428 tempest-ServerPasswordTestJSON-197125428-project-member] Lock "aec054f1-0d52-49be-9dee-8db0ae362f12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.010117] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff65a75-217b-40dd-9184-09c0b48f7dd5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.017896] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c34aa7b-26c1-437a-a80f-f07d4ec9b471 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.047905] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14a03eb-c577-4d12-8348-169d7b91bac2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.055619] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9255cb-6dae-4beb-9e0d-c7c93c7325a3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.069998] env[69027]: DEBUG nova.compute.provider_tree [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.080353] env[69027]: DEBUG nova.scheduler.client.report [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 975.095653] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.504s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.096288] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 975.130687] env[69027]: DEBUG nova.compute.utils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 975.132165] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 975.132337] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 975.143250] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 975.207436] env[69027]: DEBUG nova.policy [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '288d179f4cb345a99f6de265a6a6f22e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '818112d3b2784ba090f2d9398126023c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 975.210464] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 975.236724] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 975.236972] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 975.237142] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 975.237330] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 975.237479] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 975.237628] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 975.237836] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 975.237994] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 975.238180] env[69027]: DEBUG nova.virt.hardware [None 
req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 975.238403] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 975.238595] env[69027]: DEBUG nova.virt.hardware [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 975.239416] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bd3e41-ed88-40f3-ba71-52925a97335e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.247717] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b80ffc4-a7aa-44b4-8a0b-5523b9d51fdf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.530207] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Successfully created port: c62d367b-c8e8-4e02-95ae-dab37a3ea09a {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 976.477107] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Successfully updated port: c62d367b-c8e8-4e02-95ae-dab37a3ea09a {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 976.492433] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring lock "refresh_cache-1757d80a-dc5f-4b8b-8e86-3562b36e1b21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.492580] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquired lock "refresh_cache-1757d80a-dc5f-4b8b-8e86-3562b36e1b21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.492791] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 976.554376] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 
tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 976.761188] env[69027]: DEBUG nova.compute.manager [req-bf538dd8-5df8-4888-9fae-36711cb865b1 req-1ec13db7-e12e-438c-833c-c156fc49b090 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Received event network-vif-plugged-c62d367b-c8e8-4e02-95ae-dab37a3ea09a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 976.761188] env[69027]: DEBUG oslo_concurrency.lockutils [req-bf538dd8-5df8-4888-9fae-36711cb865b1 req-1ec13db7-e12e-438c-833c-c156fc49b090 service nova] Acquiring lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.761188] env[69027]: DEBUG oslo_concurrency.lockutils [req-bf538dd8-5df8-4888-9fae-36711cb865b1 req-1ec13db7-e12e-438c-833c-c156fc49b090 service nova] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.761188] env[69027]: DEBUG oslo_concurrency.lockutils [req-bf538dd8-5df8-4888-9fae-36711cb865b1 req-1ec13db7-e12e-438c-833c-c156fc49b090 service nova] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.761314] env[69027]: DEBUG nova.compute.manager [req-bf538dd8-5df8-4888-9fae-36711cb865b1 req-1ec13db7-e12e-438c-833c-c156fc49b090 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] No waiting events found dispatching network-vif-plugged-c62d367b-c8e8-4e02-95ae-dab37a3ea09a {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 976.761314] env[69027]: WARNING nova.compute.manager [req-bf538dd8-5df8-4888-9fae-36711cb865b1 req-1ec13db7-e12e-438c-833c-c156fc49b090 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Received unexpected event network-vif-plugged-c62d367b-c8e8-4e02-95ae-dab37a3ea09a for instance with vm_state building and task_state spawning. 
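The network-vif-plugged records above show the external-event handshake: if the event from Neutron arrives before a waiter has been registered for it, it is logged as an unexpected event and dropped; otherwise it releases the waiter. The following is only a sketch of that handshake under assumed names (EventRegistry is hypothetical and is not Nova's InstanceEvents class), using the standard library.

# Illustrative sketch of the external-event handshake seen in the log.
# EventRegistry is a hypothetical stand-in, not Nova's InstanceEvents.
import threading


class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # event name -> threading.Event

    def prepare(self, name):
        """Register interest in an event before triggering the operation."""
        with self._lock:
            ev = threading.Event()
            self._waiters[name] = ev
            return ev

    def deliver(self, name):
        """Handle an incoming event; True if a registered waiter was released."""
        with self._lock:
            ev = self._waiters.pop(name, None)
        if ev is None:
            # Mirrors the WARNING above: no waiting events found for this name.
            print(f"Received unexpected event {name}")
            return False
        ev.set()
        return True


if __name__ == "__main__":
    registry = EventRegistry()
    # Event arrives before anyone waits for it -> logged as unexpected.
    registry.deliver("network-vif-plugged-c62d367b")
    # Normal path: register first, then the delivered event unblocks the waiter.
    waiter = registry.prepare("network-vif-plugged-c62d367b")
    registry.deliver("network-vif-plugged-c62d367b")
    assert waiter.wait(timeout=1.0)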
[ 976.824650] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Updating instance_info_cache with network_info: [{"id": "c62d367b-c8e8-4e02-95ae-dab37a3ea09a", "address": "fa:16:3e:05:4f:99", "network": {"id": "e0a92b84-c97a-4117-9592-4c8f35b1709f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1778989521-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "818112d3b2784ba090f2d9398126023c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc62d367b-c8", "ovs_interfaceid": "c62d367b-c8e8-4e02-95ae-dab37a3ea09a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.844021] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Releasing lock "refresh_cache-1757d80a-dc5f-4b8b-8e86-3562b36e1b21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.844021] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Instance network_info: |[{"id": "c62d367b-c8e8-4e02-95ae-dab37a3ea09a", "address": "fa:16:3e:05:4f:99", "network": {"id": "e0a92b84-c97a-4117-9592-4c8f35b1709f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1778989521-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "818112d3b2784ba090f2d9398126023c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc62d367b-c8", "ovs_interfaceid": "c62d367b-c8e8-4e02-95ae-dab37a3ea09a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 976.844287] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:4f:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0734cc4-5718-45e2-9f98-0ded96880bef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c62d367b-c8e8-4e02-95ae-dab37a3ea09a', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 976.852300] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Creating folder: Project (818112d3b2784ba090f2d9398126023c). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 976.853150] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6129774-b2c6-4ebe-b709-bd577476c99d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.866209] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Created folder: Project (818112d3b2784ba090f2d9398126023c) in parent group-v677321. [ 976.866209] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Creating folder: Instances. Parent ref: group-v677369. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 976.866209] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-879b302e-bd2a-47c2-8cf7-378ca5f20e22 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.879216] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Created folder: Instances in parent group-v677369. [ 976.879216] env[69027]: DEBUG oslo.service.loopingcall [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 976.879216] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 976.879216] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d464bc88-70dc-494f-8499-a365460ef754 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.903310] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 976.903310] env[69027]: value = "task-3395130" [ 976.903310] env[69027]: _type = "Task" [ 976.903310] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.912710] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395130, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.415534] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395130, 'name': CreateVM_Task, 'duration_secs': 0.328034} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.415736] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 977.416444] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.416611] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.416974] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 977.417248] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd32a724-df91-4546-b950-7c0241a4a427 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.422978] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Waiting for the task: (returnval){ [ 977.422978] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5292e05b-467d-7a63-2376-b29f9f3df32c" [ 977.422978] env[69027]: _type = "Task" [ 977.422978] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.433150] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5292e05b-467d-7a63-2376-b29f9f3df32c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.932797] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.933095] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 977.933312] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.803373] env[69027]: DEBUG nova.compute.manager [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Received event network-changed-c62d367b-c8e8-4e02-95ae-dab37a3ea09a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 978.803595] env[69027]: DEBUG nova.compute.manager [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Refreshing instance network info cache due to event network-changed-c62d367b-c8e8-4e02-95ae-dab37a3ea09a. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 978.803764] env[69027]: DEBUG oslo_concurrency.lockutils [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] Acquiring lock "refresh_cache-1757d80a-dc5f-4b8b-8e86-3562b36e1b21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.803911] env[69027]: DEBUG oslo_concurrency.lockutils [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] Acquired lock "refresh_cache-1757d80a-dc5f-4b8b-8e86-3562b36e1b21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.804191] env[69027]: DEBUG nova.network.neutron [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Refreshing network info cache for port c62d367b-c8e8-4e02-95ae-dab37a3ea09a {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 979.387377] env[69027]: DEBUG nova.network.neutron [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Updated VIF entry in instance network info cache for port c62d367b-c8e8-4e02-95ae-dab37a3ea09a. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 979.387733] env[69027]: DEBUG nova.network.neutron [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Updating instance_info_cache with network_info: [{"id": "c62d367b-c8e8-4e02-95ae-dab37a3ea09a", "address": "fa:16:3e:05:4f:99", "network": {"id": "e0a92b84-c97a-4117-9592-4c8f35b1709f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1778989521-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "818112d3b2784ba090f2d9398126023c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0734cc4-5718-45e2-9f98-0ded96880bef", "external-id": "nsx-vlan-transportzone-875", "segmentation_id": 875, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc62d367b-c8", "ovs_interfaceid": "c62d367b-c8e8-4e02-95ae-dab37a3ea09a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.405856] env[69027]: DEBUG oslo_concurrency.lockutils [req-300e3e08-ce88-485b-81b2-5483415501be req-ea8d34ad-05bf-4caf-962f-1a480f821d38 service nova] Releasing lock "refresh_cache-1757d80a-dc5f-4b8b-8e86-3562b36e1b21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.978544] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.736290] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.736682] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 993.806630] env[69027]: DEBUG oslo_concurrency.lockutils [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock 
"7c4204b8-2858-43a5-855d-c99b00e91d0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.467863] env[69027]: DEBUG oslo_concurrency.lockutils [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "339bab90-238a-47ab-89f5-1ff9541ec14d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.960919] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "a907f1ab-3540-4bc0-8389-005233cca940" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.961231] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "a907f1ab-3540-4bc0-8389-005233cca940" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.766595] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1011.771187] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1011.783744] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.783974] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.784170] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.784327] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1011.785465] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965c1d8e-1838-4c55-8381-4f70233e818d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.794466] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42291b2e-77df-4762-ba36-67ef149f80d3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.808217] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c07ce4-4caf-4651-b524-f936c4b707b7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.814581] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c4fbe4-5cf2-44cc-a325-b9214e65a1f4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.844103] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180963MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1011.844275] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.844468] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.921553] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.921753] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.921900] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.922046] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.922184] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.922302] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.922419] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.922533] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.922645] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.922758] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1011.935130] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f86ab9df-e4a8-4515-81ec-a494446efa4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.945745] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 12e2bac8-06ec-43ab-bb9d-9331789aaf10 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.955770] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.965955] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 40b03034-9a4d-4c60-9847-9e24963b0d0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.975139] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b2a14f3e-3920-4c24-96bc-e11cffc4ad57 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.985483] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ac96fbcc-59d8-4625-a705-14410e0beec3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1011.995177] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 8c97744d-c2ff-477a-a973-d90d6b526559 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.004900] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 6aa59a08-032a-4de9-8fef-cef1b176a046 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.014265] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9cd6b8ee-27a8-4535-9550-29dd51fca73c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.025172] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9ab15458-940f-490d-9aae-858f9f928a80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.034836] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ed1fc438-2682-405c-94f3-42b8db784c47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.044227] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f6a3e604-e5be-4633-91b0-d8790cc4b810 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.053749] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 68bd33f5-18fb-4ab5-8b23-98c2a94ec36a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.063894] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 74bb614d-40ee-4e2b-8085-8351f85fe1ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.073837] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 51ec5aa4-027a-4f24-acac-1b6933e679de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.085172] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.095815] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.105991] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.115793] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1012.116042] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1012.116192] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1012.423727] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2c115a-a349-4a77-adbf-1b63c9c29c30 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.431088] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e18403-7bd1-49d1-a1e0-69356deebf6d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.460190] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a653900-6ebe-4c28-8505-3d4d49afd5b9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.466712] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f248a55b-f9ef-4ace-b1fd-bbe30e360958 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.479037] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.487236] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1012.500890] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1012.501086] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.657s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.501421] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.501720] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1014.501767] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1014.523194] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.523357] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.523502] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.523748] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.523818] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.523913] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.524047] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.524172] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.524292] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.524408] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1014.524527] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1014.525090] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.771341] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.792919] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1015.793152] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.771740] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.772050] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.772201] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.772346] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1019.364852] env[69027]: WARNING oslo_vmware.rw_handles [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1019.364852] env[69027]: ERROR oslo_vmware.rw_handles [ 1019.365715] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1019.367474] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1019.367540] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Copying Virtual Disk [datastore2] vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/10de4fa4-cd97-4ab8-b5ff-933548faa405/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1019.367897] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d156774-ff46-4119-b70b-086f91370f8f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.376606] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for the task: (returnval){ [ 
1019.376606] env[69027]: value = "task-3395131" [ 1019.376606] env[69027]: _type = "Task" [ 1019.376606] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.384666] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': task-3395131, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.887015] env[69027]: DEBUG oslo_vmware.exceptions [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1019.887340] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1019.887891] env[69027]: ERROR nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1019.887891] env[69027]: Faults: ['InvalidArgument'] [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Traceback (most recent call last): [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] yield resources [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self.driver.spawn(context, instance, image_meta, [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self._fetch_image_if_missing(context, vi) [ 1019.887891] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] image_cache(vi, 
tmp_image_ds_loc) [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] vm_util.copy_virtual_disk( [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] session._wait_for_task(vmdk_copy_task) [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] return self.wait_for_task(task_ref) [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] return evt.wait() [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] result = hub.switch() [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1019.888222] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] return self.greenlet.switch() [ 1019.888569] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1019.888569] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self.f(*self.args, **self.kw) [ 1019.888569] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1019.888569] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] raise exceptions.translate_fault(task_info.error) [ 1019.888569] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1019.888569] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Faults: ['InvalidArgument'] [ 1019.888569] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] [ 1019.888569] env[69027]: INFO nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Terminating instance [ 1019.889767] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 
tempest-ServersAdminTestJSON-1591918510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.889985] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.890608] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1019.890861] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1019.891103] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f44dec5b-1961-4888-957e-de1faad79569 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.893558] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dbac3c-942f-4a41-b2e0-ce8c332bf7c8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.900392] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1019.900604] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef35c6c5-5dc8-4b01-802f-bc2cbfa18c53 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.902846] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.903178] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1019.903974] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8af959f8-7dab-4062-b33f-6870d968c1c1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.909941] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 1019.909941] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]523fa448-c980-060d-2d91-2b66dbb2c5dc" [ 1019.909941] env[69027]: _type = "Task" [ 1019.909941] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.921893] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]523fa448-c980-060d-2d91-2b66dbb2c5dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.971079] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1019.971153] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1019.971326] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Deleting the datastore file [datastore2] 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1019.971617] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15eb0675-9a5d-49f2-b892-938e7a0b95be {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.977898] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for the task: (returnval){ [ 1019.977898] env[69027]: value = "task-3395133" [ 1019.977898] env[69027]: _type = "Task" [ 1019.977898] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.985634] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': task-3395133, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.420830] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1020.421164] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating directory with path [datastore2] vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.421335] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c471af5b-3dc3-406b-950c-6fbcc7dfa185 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.433149] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Created directory with path [datastore2] vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.433360] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Fetch image to [datastore2] vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1020.433539] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1020.434299] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df416011-56ef-455e-b5e2-5681031b85fa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.440919] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c65b7a-ba40-4b32-81bd-f2896240940e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.449801] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc60f31b-ec46-4e78-8eb7-ff9faa5a516f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.482362] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6b5247-e5c4-4709-942c-eb6f4dd61a8d {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.489448] env[69027]: DEBUG oslo_vmware.api [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': task-3395133, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073696} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.490982] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.491184] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1020.491364] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1020.491533] env[69027]: INFO nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Took 0.60 seconds to destroy the instance on the hypervisor. 
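The DeleteDatastoreFile_Task entries above show oslo_vmware's standard task loop: the caller submits a vCenter task, wait_for_task polls TaskInfo (the "progress is 0%" lines) and returns once the task reports success or raises the translated fault. A minimal sketch of that calling pattern, assuming illustrative connection details and a hypothetical datastore path (none of these values come from this log):

    # Sketch only: delete a datastore file and block until vCenter finishes
    # the task, mirroring the wait_for_task/_poll_task lines above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',        # placeholders
        api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] example-dir/example.vmdk',     # hypothetical path
        datacenter=None)                                  # a real dc_ref in practice
    session.wait_for_task(task)   # polls until success, raises on task error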
[ 1020.493648] env[69027]: DEBUG nova.compute.claims [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1020.493815] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.494041] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.496524] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b6216582-cee3-487d-9e32-cc35fb1a9213 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.587015] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1020.637533] env[69027]: DEBUG oslo_vmware.rw_handles [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1020.701044] env[69027]: DEBUG oslo_vmware.rw_handles [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1020.701234] env[69027]: DEBUG oslo_vmware.rw_handles [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1020.906059] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ed38fa-ba93-4c16-aee8-c91e510cb165 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.913775] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78aa2837-0e1a-4054-81ea-23f382c5d168 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.942296] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe6d7b8-16f1-440d-9d1e-760be9097906 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.949622] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534fbea9-756a-47ba-b89f-9e771ad8d318 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.963034] env[69027]: DEBUG nova.compute.provider_tree [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.973634] env[69027]: DEBUG nova.scheduler.client.report [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1020.992793] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.498s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.993307] env[69027]: ERROR nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1020.993307] env[69027]: Faults: ['InvalidArgument'] [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Traceback (most recent call last): [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1020.993307] env[69027]: ERROR nova.compute.manager 
[instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self.driver.spawn(context, instance, image_meta, [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self._fetch_image_if_missing(context, vi) [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] image_cache(vi, tmp_image_ds_loc) [ 1020.993307] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] vm_util.copy_virtual_disk( [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] session._wait_for_task(vmdk_copy_task) [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] return self.wait_for_task(task_ref) [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] return evt.wait() [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] result = hub.switch() [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] return self.greenlet.switch() [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1020.993823] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] self.f(*self.args, **self.kw) [ 1020.994175] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1020.994175] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] raise exceptions.translate_fault(task_info.error) [ 1020.994175] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1020.994175] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Faults: ['InvalidArgument'] [ 1020.994175] env[69027]: ERROR nova.compute.manager [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] [ 1020.994175] env[69027]: DEBUG nova.compute.utils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1020.995649] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Build of instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 was re-scheduled: A specified parameter was not correct: fileType [ 1020.995649] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1020.996040] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1020.996252] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1020.996419] env[69027]: DEBUG nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1020.996605] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1021.307348] env[69027]: DEBUG nova.network.neutron [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.318820] env[69027]: INFO nova.compute.manager [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Took 0.32 seconds to deallocate network for instance. [ 1021.418536] env[69027]: INFO nova.scheduler.client.report [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Deleted allocations for instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 [ 1021.440866] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9f30f34c-35b2-483b-a8d0-f840635ff012 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 430.372s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.440866] env[69027]: DEBUG oslo_concurrency.lockutils [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 228.188s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.440866] env[69027]: DEBUG oslo_concurrency.lockutils [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.441220] env[69027]: DEBUG oslo_concurrency.lockutils [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.441220] env[69027]: DEBUG oslo_concurrency.lockutils [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.442705] env[69027]: INFO nova.compute.manager [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Terminating instance [ 1021.444325] env[69027]: DEBUG nova.compute.manager [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1021.444946] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1021.445267] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b6ee4a6-7cf5-4434-8799-ac2fc84d2177 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.455911] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8f907d-e690-41db-bbcc-8d8ed76e0972 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.468178] env[69027]: DEBUG nova.compute.manager [None req-c8674dba-4b6c-4296-997e-d15b6498d5eb tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] [instance: 405e3683-a1c1-4452-91df-4e52ebf25b65] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1021.488819] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ad953b9-4ded-42cd-86e0-2b1b707be4e4 could not be found. [ 1021.488962] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1021.489062] env[69027]: INFO nova.compute.manager [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Took 0.04 seconds to destroy the instance on the hypervisor. 
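The build failure above surfaces as an oslo_vmware.exceptions.VimFaultException whose fault list is ['InvalidArgument'] ("A specified parameter was not correct: fileType"); the compute manager treats it as a re-schedulable build error, while the later terminate tolerates InstanceNotFound because the backing VM is already gone. A hedged sketch of inspecting such a fault; run_build here is a hypothetical callable, not Nova code:

    # Sketch: separate a retryable vCenter fault from an ordinary failure.
    from oslo_vmware import exceptions as vexc

    def classify_build(run_build):
        """run_build is any callable that drives the vCenter spawn (hypothetical)."""
        try:
            run_build()
        except vexc.VimFaultException as e:
            # fault_list carries the VMODL fault names, e.g. ['InvalidArgument'].
            if 'InvalidArgument' in (e.fault_list or []):
                return 'reschedule'   # same decision the log shows above
            raise
        return 'ok'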
[ 1021.489321] env[69027]: DEBUG oslo.service.loopingcall [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1021.489620] env[69027]: DEBUG nova.compute.manager [-] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1021.489662] env[69027]: DEBUG nova.network.neutron [-] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1021.497763] env[69027]: DEBUG nova.compute.manager [None req-c8674dba-4b6c-4296-997e-d15b6498d5eb tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] [instance: 405e3683-a1c1-4452-91df-4e52ebf25b65] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1021.515252] env[69027]: DEBUG nova.network.neutron [-] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.520041] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c8674dba-4b6c-4296-997e-d15b6498d5eb tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] Lock "405e3683-a1c1-4452-91df-4e52ebf25b65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.730s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.523575] env[69027]: INFO nova.compute.manager [-] [instance: 6ad953b9-4ded-42cd-86e0-2b1b707be4e4] Took 0.03 seconds to deallocate network for instance. [ 1021.529149] env[69027]: DEBUG nova.compute.manager [None req-fed35b1c-07ef-4685-995f-10090cad0780 tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] [instance: c49c78cd-90f2-4157-8938-88492ae533ac] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1021.553536] env[69027]: DEBUG nova.compute.manager [None req-fed35b1c-07ef-4685-995f-10090cad0780 tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] [instance: c49c78cd-90f2-4157-8938-88492ae533ac] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1021.582204] env[69027]: DEBUG oslo_concurrency.lockutils [None req-fed35b1c-07ef-4685-995f-10090cad0780 tempest-ServerShowV247Test-1206906005 tempest-ServerShowV247Test-1206906005-project-member] Lock "c49c78cd-90f2-4157-8938-88492ae533ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.957s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.596753] env[69027]: DEBUG nova.compute.manager [None req-bae9b02a-65bc-4934-9466-0ff6649a1f7c tempest-ServersTestFqdnHostnames-901269648 tempest-ServersTestFqdnHostnames-901269648-project-member] [instance: 1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1021.632201] env[69027]: DEBUG oslo_concurrency.lockutils [None req-432b925f-41e2-4638-a76c-5836ab06360c tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "6ad953b9-4ded-42cd-86e0-2b1b707be4e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.634951] env[69027]: DEBUG nova.compute.manager [None req-bae9b02a-65bc-4934-9466-0ff6649a1f7c tempest-ServersTestFqdnHostnames-901269648 tempest-ServersTestFqdnHostnames-901269648-project-member] [instance: 1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1021.658152] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bae9b02a-65bc-4934-9466-0ff6649a1f7c tempest-ServersTestFqdnHostnames-901269648 tempest-ServersTestFqdnHostnames-901269648-project-member] Lock "1fe37f8b-8fac-42fe-ac15-ac1d198d7dfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.328s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.668060] env[69027]: DEBUG nova.compute.manager [None req-4dc64825-e834-4ebd-8a51-a9937616dee6 tempest-ServerShowV254Test-493597978 tempest-ServerShowV254Test-493597978-project-member] [instance: f86ab9df-e4a8-4515-81ec-a494446efa4c] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1021.694966] env[69027]: DEBUG nova.compute.manager [None req-4dc64825-e834-4ebd-8a51-a9937616dee6 tempest-ServerShowV254Test-493597978 tempest-ServerShowV254Test-493597978-project-member] [instance: f86ab9df-e4a8-4515-81ec-a494446efa4c] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1021.715663] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4dc64825-e834-4ebd-8a51-a9937616dee6 tempest-ServerShowV254Test-493597978 tempest-ServerShowV254Test-493597978-project-member] Lock "f86ab9df-e4a8-4515-81ec-a494446efa4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.992s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.724798] env[69027]: DEBUG nova.compute.manager [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1021.770516] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.770842] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.772763] env[69027]: INFO nova.compute.claims [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1021.799366] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.028s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.800155] env[69027]: DEBUG nova.compute.utils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Instance 12e2bac8-06ec-43ab-bb9d-9331789aaf10 could not be found. {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1021.802231] env[69027]: DEBUG nova.compute.manager [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Instance disappeared during build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1021.802509] env[69027]: DEBUG nova.compute.manager [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1021.802835] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Acquiring lock "refresh_cache-12e2bac8-06ec-43ab-bb9d-9331789aaf10" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.803081] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Acquired lock "refresh_cache-12e2bac8-06ec-43ab-bb9d-9331789aaf10" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.803270] env[69027]: DEBUG nova.network.neutron [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1021.810224] env[69027]: DEBUG nova.compute.utils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Can not refresh info_cache because instance was not found {{(pid=69027) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1021.834526] env[69027]: DEBUG nova.network.neutron [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1021.974794] env[69027]: DEBUG nova.network.neutron [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.988890] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Releasing lock "refresh_cache-12e2bac8-06ec-43ab-bb9d-9331789aaf10" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.989156] env[69027]: DEBUG nova.compute.manager [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1021.989348] env[69027]: DEBUG nova.compute.manager [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1021.989518] env[69027]: DEBUG nova.network.neutron [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1022.010281] env[69027]: DEBUG nova.network.neutron [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1022.018399] env[69027]: DEBUG nova.network.neutron [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.029279] env[69027]: INFO nova.compute.manager [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] [instance: 12e2bac8-06ec-43ab-bb9d-9331789aaf10] Took 0.04 seconds to deallocate network for instance. 
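The recurring "Acquiring lock ... by ...", "Lock ... acquired ... waited", and 'Lock ... "released" ... held' triples are emitted by oslo_concurrency's lockutils wrapper; Nova names its locks after the resource being serialized ("compute_resources", "refresh_cache-<uuid>", "<uuid>-events"). A minimal sketch of the same pattern with an in-process lock; the function body is illustrative only:

    # Sketch: the locking pattern behind the lockutils DEBUG lines in this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_claim(instance_uuid):
        # Held for the duration of the call; lockutils logs the acquire/wait/
        # held timings, producing entries like the ones above.
        print('aborting claim for %s' % instance_uuid)

    # The same helper is available as a context manager:
    with lockutils.lock('refresh_cache-12e2bac8-06ec-43ab-bb9d-9331789aaf10'):
        pass  # rebuild the instance network info cache here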
[ 1022.076421] env[69027]: DEBUG oslo_concurrency.lockutils [None req-82a706f8-b736-442c-9e90-a6dd208882fe tempest-ServerRescueTestJSONUnderV235-1330573226 tempest-ServerRescueTestJSONUnderV235-1330573226-project-member] Lock "12e2bac8-06ec-43ab-bb9d-9331789aaf10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.997s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.085488] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1022.144593] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.144960] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.146624] env[69027]: INFO nova.compute.claims [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.538205] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3ee1d0-1b99-4ffa-98ff-bf5964a45f0f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.545931] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083bf63e-f177-4bc5-8803-818485c5ff6f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.575048] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7fd04d-b811-4329-9bcd-af5565e19c94 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.582424] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4a543e-9591-45a8-9ec7-380d6865d6da {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.595913] env[69027]: DEBUG nova.compute.provider_tree [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.604897] env[69027]: DEBUG 
nova.scheduler.client.report [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1022.622187] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.477s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.622664] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1022.653935] env[69027]: DEBUG nova.compute.utils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1022.656018] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1022.656243] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1022.665265] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1022.719848] env[69027]: DEBUG nova.policy [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '187f691cd18746a29dff5fa38f703704', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ff7f2092871426fac0d8ab36997f4be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1022.732624] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1022.759015] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1022.759273] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1022.759433] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1022.759620] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1022.759780] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1022.759952] env[69027]: DEBUG 
nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1022.760214] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1022.760382] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1022.760548] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1022.760737] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1022.760913] env[69027]: DEBUG nova.virt.hardware [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1022.761775] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6436e5-d369-4a3b-b947-7c8bbf6b5a22 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.769785] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c384f66b-e774-4ccb-b19e-41b9a97984e5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.352733] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Successfully created port: ebb1a696-cea4-4a08-993c-71c398054c34 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1024.075786] env[69027]: DEBUG nova.compute.manager [req-92dc3fad-49b5-4e87-8ffb-c1d65a43e434 req-808b2583-140a-49e3-a6f7-beadaa9ddb1b service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Received event network-vif-plugged-ebb1a696-cea4-4a08-993c-71c398054c34 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1024.076059] env[69027]: DEBUG oslo_concurrency.lockutils [req-92dc3fad-49b5-4e87-8ffb-c1d65a43e434 req-808b2583-140a-49e3-a6f7-beadaa9ddb1b 
service nova] Acquiring lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.076220] env[69027]: DEBUG oslo_concurrency.lockutils [req-92dc3fad-49b5-4e87-8ffb-c1d65a43e434 req-808b2583-140a-49e3-a6f7-beadaa9ddb1b service nova] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.076390] env[69027]: DEBUG oslo_concurrency.lockutils [req-92dc3fad-49b5-4e87-8ffb-c1d65a43e434 req-808b2583-140a-49e3-a6f7-beadaa9ddb1b service nova] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.076554] env[69027]: DEBUG nova.compute.manager [req-92dc3fad-49b5-4e87-8ffb-c1d65a43e434 req-808b2583-140a-49e3-a6f7-beadaa9ddb1b service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] No waiting events found dispatching network-vif-plugged-ebb1a696-cea4-4a08-993c-71c398054c34 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1024.076717] env[69027]: WARNING nova.compute.manager [req-92dc3fad-49b5-4e87-8ffb-c1d65a43e434 req-808b2583-140a-49e3-a6f7-beadaa9ddb1b service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Received unexpected event network-vif-plugged-ebb1a696-cea4-4a08-993c-71c398054c34 for instance with vm_state building and task_state spawning. 
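The inventory reported earlier for provider 4923c91f-3b2b-4ad1-a821-36209acae639 maps to schedulable capacity as (total - reserved) * allocation_ratio per resource class, i.e. 192 VCPU, 196078 MB of memory and 400 GB of disk. A worked check using the exact figures from the report:

    # Usable capacity per resource class, from the inventory dict logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0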
[ 1024.162156] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Successfully updated port: ebb1a696-cea4-4a08-993c-71c398054c34 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.172500] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "refresh_cache-c099867e-d9e3-43a4-b2cb-568270d4aa6b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.172750] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquired lock "refresh_cache-c099867e-d9e3-43a4-b2cb-568270d4aa6b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.172975] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1024.229707] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1024.431760] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Updating instance_info_cache with network_info: [{"id": "ebb1a696-cea4-4a08-993c-71c398054c34", "address": "fa:16:3e:39:c8:4c", "network": {"id": "4f4a2802-0cd6-47ff-97b4-a7eec298a766", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1812789423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ff7f2092871426fac0d8ab36997f4be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebb1a696-ce", "ovs_interfaceid": "ebb1a696-cea4-4a08-993c-71c398054c34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.447756] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Releasing lock "refresh_cache-c099867e-d9e3-43a4-b2cb-568270d4aa6b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.448086] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Instance network_info: |[{"id": "ebb1a696-cea4-4a08-993c-71c398054c34", "address": "fa:16:3e:39:c8:4c", "network": {"id": "4f4a2802-0cd6-47ff-97b4-a7eec298a766", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1812789423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ff7f2092871426fac0d8ab36997f4be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebb1a696-ce", "ovs_interfaceid": "ebb1a696-cea4-4a08-993c-71c398054c34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1024.448484] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:c8:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ebb1a696-cea4-4a08-993c-71c398054c34', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1024.456075] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Creating folder: Project (0ff7f2092871426fac0d8ab36997f4be). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1024.456564] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9790fef-3336-47e2-8250-0fd7a32785d2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.467547] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Created folder: Project (0ff7f2092871426fac0d8ab36997f4be) in parent group-v677321. [ 1024.467726] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Creating folder: Instances. Parent ref: group-v677372. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1024.467940] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-861ee8a3-3ea7-4b86-ab50-a35f3e28719f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.475542] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Created folder: Instances in parent group-v677372. [ 1024.475765] env[69027]: DEBUG oslo.service.loopingcall [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1024.475948] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1024.476169] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c90f4996-b090-4a3c-8096-04df943e6ebb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.494258] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1024.494258] env[69027]: value = "task-3395136" [ 1024.494258] env[69027]: _type = "Task" [ 1024.494258] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.505045] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395136, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.004434] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395136, 'name': CreateVM_Task, 'duration_secs': 0.297126} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.004602] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1025.005363] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.005533] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.005874] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1025.006146] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-380ef77d-4bd5-49af-a366-a3f6f5f46a99 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.010829] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Waiting for the task: (returnval){ [ 1025.010829] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5241b69e-6536-278a-9fd6-08352dd15773" [ 1025.010829] env[69027]: _type = "Task" [ 1025.010829] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.023727] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5241b69e-6536-278a-9fd6-08352dd15773, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.522353] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.522699] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1025.522794] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1025.535067] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.115274] env[69027]: DEBUG nova.compute.manager [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Received event network-changed-ebb1a696-cea4-4a08-993c-71c398054c34 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1026.115274] env[69027]: DEBUG nova.compute.manager [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Refreshing instance network info cache due to event network-changed-ebb1a696-cea4-4a08-993c-71c398054c34. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1026.115274] env[69027]: DEBUG oslo_concurrency.lockutils [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] Acquiring lock "refresh_cache-c099867e-d9e3-43a4-b2cb-568270d4aa6b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.115400] env[69027]: DEBUG oslo_concurrency.lockutils [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] Acquired lock "refresh_cache-c099867e-d9e3-43a4-b2cb-568270d4aa6b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.115530] env[69027]: DEBUG nova.network.neutron [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Refreshing network info cache for port ebb1a696-cea4-4a08-993c-71c398054c34 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1026.386797] env[69027]: DEBUG nova.network.neutron [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Updated VIF entry in instance network info cache for port ebb1a696-cea4-4a08-993c-71c398054c34. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1026.387163] env[69027]: DEBUG nova.network.neutron [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Updating instance_info_cache with network_info: [{"id": "ebb1a696-cea4-4a08-993c-71c398054c34", "address": "fa:16:3e:39:c8:4c", "network": {"id": "4f4a2802-0cd6-47ff-97b4-a7eec298a766", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1812789423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0ff7f2092871426fac0d8ab36997f4be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapebb1a696-ce", "ovs_interfaceid": "ebb1a696-cea4-4a08-993c-71c398054c34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.396527] env[69027]: DEBUG oslo_concurrency.lockutils [req-b2e94adf-fda0-4c70-959e-a64bfa4f2571 req-e6bf4098-b962-40fc-92f4-79bc0dd47ad6 service nova] Releasing lock "refresh_cache-c099867e-d9e3-43a4-b2cb-568270d4aa6b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.719942] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] 
Acquiring lock "1715faa2-86ea-49f9-a993-1003aea54384" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.720297] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "1715faa2-86ea-49f9-a993-1003aea54384" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.384146] env[69027]: DEBUG oslo_concurrency.lockutils [None req-76159d44-04cc-4630-8c6a-18dda9f4f1f6 tempest-InstanceActionsNegativeTestJSON-598876948 tempest-InstanceActionsNegativeTestJSON-598876948-project-member] Acquiring lock "5401d655-86da-41b0-9d29-3ba25d21f1ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.384495] env[69027]: DEBUG oslo_concurrency.lockutils [None req-76159d44-04cc-4630-8c6a-18dda9f4f1f6 tempest-InstanceActionsNegativeTestJSON-598876948 tempest-InstanceActionsNegativeTestJSON-598876948-project-member] Lock "5401d655-86da-41b0-9d29-3ba25d21f1ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.029523] env[69027]: DEBUG oslo_concurrency.lockutils [None req-dd737303-2d0a-47fa-ac07-a9e9419d00e1 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "16b7e547-8dc8-4305-8ff6-64736fc9cbb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.029523] env[69027]: DEBUG oslo_concurrency.lockutils [None req-dd737303-2d0a-47fa-ac07-a9e9419d00e1 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "16b7e547-8dc8-4305-8ff6-64736fc9cbb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.530015] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Acquiring lock "f4d49026-8558-44c7-b475-215eecba4e09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.530297] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "f4d49026-8558-44c7-b475-215eecba4e09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.554228] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Acquiring lock "fb7d7808-9768-4882-9405-0d07c41509fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.554447] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "fb7d7808-9768-4882-9405-0d07c41509fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.119274] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b581503-68a2-4485-ac65-aaebe7770868 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "c39e4257-7526-461e-ad95-91defe4d51ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.119578] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b581503-68a2-4485-ac65-aaebe7770868 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "c39e4257-7526-461e-ad95-91defe4d51ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1053.907274] env[69027]: DEBUG oslo_concurrency.lockutils [None req-00808fa4-ac0f-4b32-b8b4-ffc6d98272bd tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Acquiring lock "e6224db8-1a05-4832-95be-7231fda105f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1053.907513] env[69027]: DEBUG oslo_concurrency.lockutils [None req-00808fa4-ac0f-4b32-b8b4-ffc6d98272bd tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Lock "e6224db8-1a05-4832-95be-7231fda105f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1055.790018] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b9091b43-688c-44d6-bf02-1fc70c4f47f0 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] Acquiring lock "03e2d14d-9195-4ee5-b2e0-05b803dcfefc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1055.790018] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b9091b43-688c-44d6-bf02-1fc70c4f47f0 tempest-ServersTestMultiNic-1655463347 
tempest-ServersTestMultiNic-1655463347-project-member] Lock "03e2d14d-9195-4ee5-b2e0-05b803dcfefc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.819316] env[69027]: DEBUG oslo_concurrency.lockutils [None req-50e2b5d3-22cf-4de7-95d3-24295f8a6a2b tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "45296842-f415-42eb-b67e-096465650c09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.819651] env[69027]: DEBUG oslo_concurrency.lockutils [None req-50e2b5d3-22cf-4de7-95d3-24295f8a6a2b tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "45296842-f415-42eb-b67e-096465650c09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.407989] env[69027]: DEBUG oslo_concurrency.lockutils [None req-79a7f2a0-af60-4580-94b4-10d47aa34834 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Acquiring lock "34a1076e-6a17-442d-8a71-1d49117edad5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.408499] env[69027]: DEBUG oslo_concurrency.lockutils [None req-79a7f2a0-af60-4580-94b4-10d47aa34834 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Lock "34a1076e-6a17-442d-8a71-1d49117edad5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.762064] env[69027]: WARNING oslo_vmware.rw_handles [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection 
without" [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1069.762064] env[69027]: ERROR oslo_vmware.rw_handles [ 1069.762064] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1069.763735] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1069.764009] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Copying Virtual Disk [datastore2] vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/c468d2c7-9a3a-457f-93cd-b99dd01d4e50/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1069.764306] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9f17619-de27-47c3-9c8c-4eadf1f0aec3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.771847] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 1069.771847] env[69027]: value = "task-3395142" [ 1069.771847] env[69027]: _type = "Task" [ 1069.771847] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.780542] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': task-3395142, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.282197] env[69027]: DEBUG oslo_vmware.exceptions [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1070.282547] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.283115] env[69027]: ERROR nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1070.283115] env[69027]: Faults: ['InvalidArgument'] [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Traceback (most recent call last): [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] yield resources [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self.driver.spawn(context, instance, image_meta, [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self._fetch_image_if_missing(context, vi) [ 1070.283115] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] image_cache(vi, tmp_image_ds_loc) [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] vm_util.copy_virtual_disk( [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] session._wait_for_task(vmdk_copy_task) [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] return self.wait_for_task(task_ref) [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] return evt.wait() [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] result = hub.switch() [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1070.283434] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] return self.greenlet.switch() [ 1070.284338] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1070.284338] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self.f(*self.args, **self.kw) [ 1070.284338] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1070.284338] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] raise exceptions.translate_fault(task_info.error) [ 1070.284338] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1070.284338] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Faults: ['InvalidArgument'] [ 1070.284338] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] [ 1070.284338] env[69027]: INFO nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Terminating instance [ 1070.285065] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.285355] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1070.285893] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] 
[instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1070.286098] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1070.286324] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dacf71d-af23-4353-99d5-0521d1cf0e81 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.290174] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad5fcfa-2a84-436f-9ead-de999f4bff7c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.296772] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1070.296989] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa67f13f-ffc9-4898-a43b-daf7588ba402 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.299107] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1070.299301] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1070.300204] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c5f336b-7a3e-491d-b609-5d1abfb4fd05 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.304898] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 1070.304898] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52459ca6-9df0-15f0-19c4-3cc213b81adf" [ 1070.304898] env[69027]: _type = "Task" [ 1070.304898] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.311714] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52459ca6-9df0-15f0-19c4-3cc213b81adf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.767295] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1070.814719] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1070.814976] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating directory with path [datastore2] vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1070.816122] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15e0e5ca-810a-40ab-b3ae-9b5afd0c6d58 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.818017] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1070.818232] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1070.818408] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Deleting the datastore file [datastore2] 7c4204b8-2858-43a5-855d-c99b00e91d0d {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.818628] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a6da9da-cd6e-4ea5-a494-6a50fc171606 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.825386] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 1070.825386] env[69027]: value = "task-3395144" [ 1070.825386] env[69027]: _type = "Task" [ 1070.825386] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.829892] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created directory with path [datastore2] vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1070.830103] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Fetch image to [datastore2] vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1070.830394] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1070.833469] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f36db7-a705-4e98-a474-f5604a5717f7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.835660] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': task-3395144, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.839830] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0aaa50-63e3-4091-af50-33a49c5ef29b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.848690] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdc156d-0254-430e-b791-816637882c84 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.878161] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3420d5-abc8-4040-8a49-aa064cc23563 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.884024] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0ab72f4a-6ed4-440d-ba97-800e88a877e8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.903449] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1070.956615] env[69027]: DEBUG oslo_vmware.rw_handles [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1071.018962] env[69027]: DEBUG oslo_vmware.rw_handles [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1071.019196] env[69027]: DEBUG oslo_vmware.rw_handles [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1071.335884] env[69027]: DEBUG oslo_vmware.api [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': task-3395144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08257} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.336170] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.336354] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1071.336541] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1071.336704] env[69027]: INFO nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Took 1.05 seconds to destroy the instance on the hypervisor. [ 1071.338821] env[69027]: DEBUG nova.compute.claims [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1071.339011] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.339283] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.632754] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f333a5b-6f8e-4909-a6e8-8c76ba764d3b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.640158] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f33ae7-70eb-4153-b7e3-5494bc6c8bc4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.669686] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b070eb0-7cd3-4373-a67d-1aff2ec77d25 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.676864] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-83490bba-6353-4eaa-9a62-da6c40d0b0d3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.690432] env[69027]: DEBUG nova.compute.provider_tree [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1071.699029] env[69027]: DEBUG nova.scheduler.client.report [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1071.716577] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.377s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.717046] env[69027]: ERROR nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1071.717046] env[69027]: Faults: ['InvalidArgument'] [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Traceback (most recent call last): [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self.driver.spawn(context, instance, image_meta, [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self._fetch_image_if_missing(context, vi) [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 
7c4204b8-2858-43a5-855d-c99b00e91d0d] image_cache(vi, tmp_image_ds_loc) [ 1071.717046] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] vm_util.copy_virtual_disk( [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] session._wait_for_task(vmdk_copy_task) [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] return self.wait_for_task(task_ref) [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] return evt.wait() [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] result = hub.switch() [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] return self.greenlet.switch() [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1071.717478] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] self.f(*self.args, **self.kw) [ 1071.717839] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1071.717839] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] raise exceptions.translate_fault(task_info.error) [ 1071.717839] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1071.717839] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Faults: ['InvalidArgument'] [ 1071.717839] env[69027]: ERROR nova.compute.manager [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] [ 1071.717984] env[69027]: DEBUG nova.compute.utils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1071.719689] env[69027]: DEBUG 
nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Build of instance 7c4204b8-2858-43a5-855d-c99b00e91d0d was re-scheduled: A specified parameter was not correct: fileType [ 1071.719689] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1071.720072] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1071.720249] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1071.720418] env[69027]: DEBUG nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1071.720580] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1071.771402] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1071.783736] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.783960] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.784141] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.784297] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1071.785425] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a03d565-24be-40bb-8894-c152b044b006 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.793765] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610d4f62-0470-47fb-a7c5-7aa44c85506f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.808481] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de00858-6783-4a00-ad67-712166571805 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.815149] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fecbc6-2854-4edb-bd1f-deef0ea5d770 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.844149] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180980MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1071.844323] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.844527] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.932023] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.932023] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 339bab90-238a-47ab-89f5-1ff9541ec14d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932023] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932023] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932518] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932518] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932518] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932518] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932639] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.932639] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1071.943169] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.954128] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.964617] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.974820] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.986742] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1071.999659] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 5401d655-86da-41b0-9d29-3ba25d21f1ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.017159] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 16b7e547-8dc8-4305-8ff6-64736fc9cbb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.028230] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f4d49026-8558-44c7-b475-215eecba4e09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.039910] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fb7d7808-9768-4882-9405-0d07c41509fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.053950] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c39e4257-7526-461e-ad95-91defe4d51ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.071824] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e6224db8-1a05-4832-95be-7231fda105f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.084496] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03e2d14d-9195-4ee5-b2e0-05b803dcfefc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.100247] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 45296842-f415-42eb-b67e-096465650c09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.101534] env[69027]: DEBUG nova.network.neutron [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.113413] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 34a1076e-6a17-442d-8a71-1d49117edad5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1072.113667] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1072.113811] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1728MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1072.118220] env[69027]: INFO nova.compute.manager [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Took 0.40 seconds to deallocate network for instance. [ 1072.215962] env[69027]: INFO nova.scheduler.client.report [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Deleted allocations for instance 7c4204b8-2858-43a5-855d-c99b00e91d0d [ 1072.240611] env[69027]: DEBUG oslo_concurrency.lockutils [None req-49eb4ca3-9f61-4ccd-af1f-485400acd4be tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 478.923s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.241806] env[69027]: DEBUG oslo_concurrency.lockutils [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 78.435s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.242042] env[69027]: DEBUG oslo_concurrency.lockutils [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "7c4204b8-2858-43a5-855d-c99b00e91d0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1072.242527] env[69027]: DEBUG oslo_concurrency.lockutils [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1072.242527] env[69027]: DEBUG oslo_concurrency.lockutils [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.245116] env[69027]: INFO nova.compute.manager [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Terminating instance [ 1072.249614] env[69027]: DEBUG nova.compute.manager [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1072.249782] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1072.250015] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5be2641-139b-4188-b801-4fc3e7074603 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.255669] env[69027]: DEBUG nova.compute.manager [None req-d849cff0-e8e6-4113-ad53-ca385754edb9 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 40b03034-9a4d-4c60-9847-9e24963b0d0f] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.264407] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91683064-925b-41cf-8979-593ed545ceeb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.281028] env[69027]: DEBUG nova.compute.manager [None req-d849cff0-e8e6-4113-ad53-ca385754edb9 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 40b03034-9a4d-4c60-9847-9e24963b0d0f] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.294224] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7c4204b8-2858-43a5-855d-c99b00e91d0d could not be found. [ 1072.294430] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1072.294608] env[69027]: INFO nova.compute.manager [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1072.294847] env[69027]: DEBUG oslo.service.loopingcall [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1072.297364] env[69027]: DEBUG nova.compute.manager [-] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1072.297466] env[69027]: DEBUG nova.network.neutron [-] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1072.318569] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d849cff0-e8e6-4113-ad53-ca385754edb9 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "40b03034-9a4d-4c60-9847-9e24963b0d0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.459s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.322905] env[69027]: DEBUG nova.network.neutron [-] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.332187] env[69027]: DEBUG nova.compute.manager [None req-9fadb744-1482-4829-a343-0abe932ebe3c tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: b2a14f3e-3920-4c24-96bc-e11cffc4ad57] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.335774] env[69027]: INFO nova.compute.manager [-] [instance: 7c4204b8-2858-43a5-855d-c99b00e91d0d] Took 0.04 seconds to deallocate network for instance. [ 1072.360000] env[69027]: DEBUG nova.compute.manager [None req-9fadb744-1482-4829-a343-0abe932ebe3c tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: b2a14f3e-3920-4c24-96bc-e11cffc4ad57] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.382732] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9fadb744-1482-4829-a343-0abe932ebe3c tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "b2a14f3e-3920-4c24-96bc-e11cffc4ad57" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.781s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.392101] env[69027]: DEBUG nova.compute.manager [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: ac96fbcc-59d8-4625-a705-14410e0beec3] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.426149] env[69027]: DEBUG nova.compute.manager [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: ac96fbcc-59d8-4625-a705-14410e0beec3] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.441059] env[69027]: DEBUG oslo_concurrency.lockutils [None req-45228409-4c37-4df2-a6fa-8c436384ad1a tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "7c4204b8-2858-43a5-855d-c99b00e91d0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.450288] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "ac96fbcc-59d8-4625-a705-14410e0beec3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.358s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.460309] env[69027]: DEBUG nova.compute.manager [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: 8c97744d-c2ff-477a-a973-d90d6b526559] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.484874] env[69027]: DEBUG nova.compute.manager [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: 8c97744d-c2ff-477a-a973-d90d6b526559] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.501835] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ace109cb-8a8f-4f8d-8348-cb4429a75987 tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "8c97744d-c2ff-477a-a973-d90d6b526559" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.379s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.512359] env[69027]: DEBUG nova.compute.manager [None req-cf1eaba9-0715-4331-8f2c-4b67d539a3df tempest-ServersAaction247Test-332079165 tempest-ServersAaction247Test-332079165-project-member] [instance: 6aa59a08-032a-4de9-8fef-cef1b176a046] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.531671] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22449f04-3b6e-4bbb-bab5-b429e1786828 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.535962] env[69027]: DEBUG nova.compute.manager [None req-cf1eaba9-0715-4331-8f2c-4b67d539a3df tempest-ServersAaction247Test-332079165 tempest-ServersAaction247Test-332079165-project-member] [instance: 6aa59a08-032a-4de9-8fef-cef1b176a046] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.539494] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6058da-770d-4a32-be36-877e3118e48e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.570468] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681d054f-5ddd-401d-8553-53e89fca3ae0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.577123] env[69027]: DEBUG oslo_concurrency.lockutils [None req-cf1eaba9-0715-4331-8f2c-4b67d539a3df tempest-ServersAaction247Test-332079165 tempest-ServersAaction247Test-332079165-project-member] Lock "6aa59a08-032a-4de9-8fef-cef1b176a046" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.364s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.578273] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419f6521-3fb4-4b72-ad39-b762621f1ea7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.592830] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.593848] env[69027]: DEBUG nova.compute.manager [None req-e55d8d76-3f37-4538-9b49-c361368c032e tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] [instance: 9cd6b8ee-27a8-4535-9550-29dd51fca73c] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.600228] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1072.612471] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1072.612647] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.768s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.615642] env[69027]: DEBUG nova.compute.manager [None req-e55d8d76-3f37-4538-9b49-c361368c032e tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] [instance: 9cd6b8ee-27a8-4535-9550-29dd51fca73c] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.633494] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e55d8d76-3f37-4538-9b49-c361368c032e tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Lock "9cd6b8ee-27a8-4535-9550-29dd51fca73c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.928s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.641242] env[69027]: DEBUG nova.compute.manager [None req-391beaea-9bb3-42e4-a081-0f07634a8349 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] [instance: 9ab15458-940f-490d-9aae-858f9f928a80] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.662856] env[69027]: DEBUG nova.compute.manager [None req-391beaea-9bb3-42e4-a081-0f07634a8349 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] [instance: 9ab15458-940f-490d-9aae-858f9f928a80] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.681820] env[69027]: DEBUG oslo_concurrency.lockutils [None req-391beaea-9bb3-42e4-a081-0f07634a8349 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] Lock "9ab15458-940f-490d-9aae-858f9f928a80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.588s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.690575] env[69027]: DEBUG nova.compute.manager [None req-f9a5890f-eff8-4ee2-b7d0-12770a9b5cc7 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: ed1fc438-2682-405c-94f3-42b8db784c47] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.717558] env[69027]: DEBUG nova.compute.manager [None req-f9a5890f-eff8-4ee2-b7d0-12770a9b5cc7 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: ed1fc438-2682-405c-94f3-42b8db784c47] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.738147] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f9a5890f-eff8-4ee2-b7d0-12770a9b5cc7 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "ed1fc438-2682-405c-94f3-42b8db784c47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.765s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.747932] env[69027]: DEBUG nova.compute.manager [None req-a1ea9afe-f45d-4f09-ac2e-08d22b648973 tempest-ServerActionsTestOtherB-530383552 tempest-ServerActionsTestOtherB-530383552-project-member] [instance: f6a3e604-e5be-4633-91b0-d8790cc4b810] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.773195] env[69027]: DEBUG nova.compute.manager [None req-a1ea9afe-f45d-4f09-ac2e-08d22b648973 tempest-ServerActionsTestOtherB-530383552 tempest-ServerActionsTestOtherB-530383552-project-member] [instance: f6a3e604-e5be-4633-91b0-d8790cc4b810] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.794347] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1ea9afe-f45d-4f09-ac2e-08d22b648973 tempest-ServerActionsTestOtherB-530383552 tempest-ServerActionsTestOtherB-530383552-project-member] Lock "f6a3e604-e5be-4633-91b0-d8790cc4b810" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.089s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.803856] env[69027]: DEBUG nova.compute.manager [None req-21d37828-0151-4169-a4c9-01288948fcfe tempest-InstanceActionsV221TestJSON-1059247159 tempest-InstanceActionsV221TestJSON-1059247159-project-member] [instance: 68bd33f5-18fb-4ab5-8b23-98c2a94ec36a] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.829445] env[69027]: DEBUG nova.compute.manager [None req-21d37828-0151-4169-a4c9-01288948fcfe tempest-InstanceActionsV221TestJSON-1059247159 tempest-InstanceActionsV221TestJSON-1059247159-project-member] [instance: 68bd33f5-18fb-4ab5-8b23-98c2a94ec36a] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.854138] env[69027]: DEBUG oslo_concurrency.lockutils [None req-21d37828-0151-4169-a4c9-01288948fcfe tempest-InstanceActionsV221TestJSON-1059247159 tempest-InstanceActionsV221TestJSON-1059247159-project-member] Lock "68bd33f5-18fb-4ab5-8b23-98c2a94ec36a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.539s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.867803] env[69027]: DEBUG nova.compute.manager [None req-f282a93c-7498-491a-be03-e364795bff7d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 74bb614d-40ee-4e2b-8085-8351f85fe1ed] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.895488] env[69027]: DEBUG nova.compute.manager [None req-f282a93c-7498-491a-be03-e364795bff7d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 74bb614d-40ee-4e2b-8085-8351f85fe1ed] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.924942] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f282a93c-7498-491a-be03-e364795bff7d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "74bb614d-40ee-4e2b-8085-8351f85fe1ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.976s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.937617] env[69027]: DEBUG nova.compute.manager [None req-2fb33b5d-c20f-441c-8ca2-5e9661477483 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] [instance: 51ec5aa4-027a-4f24-acac-1b6933e679de] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1072.976063] env[69027]: DEBUG nova.compute.manager [None req-2fb33b5d-c20f-441c-8ca2-5e9661477483 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] [instance: 51ec5aa4-027a-4f24-acac-1b6933e679de] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1072.996852] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2fb33b5d-c20f-441c-8ca2-5e9661477483 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Lock "51ec5aa4-027a-4f24-acac-1b6933e679de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.901s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.008764] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1073.063822] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1073.064441] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.065868] env[69027]: INFO nova.compute.claims [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.411684] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be1a3a7-d0fa-4049-ac0b-12b1c6cfc7e4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.420551] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc6cbc9-f123-424c-895c-6dcfcd04ce24 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.453676] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd149254-f077-4409-a48c-b310c956d4fc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.461093] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdef6844-3c28-401d-bd57-2f8cb2ab5a7d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.473886] env[69027]: DEBUG nova.compute.provider_tree [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.482874] env[69027]: DEBUG nova.scheduler.client.report [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1073.503320] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.439s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1073.504095] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1073.540105] env[69027]: DEBUG nova.compute.utils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1073.542630] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1073.542846] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1073.550357] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1073.614685] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1073.643105] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1073.643358] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1073.643520] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1073.643719] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1073.643865] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1073.644021] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1073.644241] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1073.644412] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
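The nova.virt.hardware records above go from the flavor and image limits (all 0, i.e. unconstrained, so the maxima fall back to 65536) to "Build topologies for 1 vcpu(s) 1:1:1", and the next records report a single possible topology of 1 socket, 1 core, 1 thread. A simplified, self-contained sketch of that enumeration is given below; it is not Nova's implementation, only an illustration of listing (sockets, cores, threads) combinations whose product equals the flavor's vCPU count while staying within the maxima.

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate factorizations vcpus = sockets * cores * threads, honouring the maxima.
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- matching the single
# topology reported for the m1.nano flavor (vcpus=1) in the records above.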
[ 1073.644577] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1073.644747] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1073.644924] env[69027]: DEBUG nova.virt.hardware [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1073.645804] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3d6a51-abae-4877-94c0-180194be5c68 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.653920] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fbdc28-cd50-462b-a9fe-d31ee033a3e3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.852792] env[69027]: DEBUG nova.policy [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84cb84f21baf49b0b331f601a107c990', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78a13bae334a4ef7959f0d408926ca33', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1074.601656] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Successfully created port: ad41719c-1897-4542-bd65-ae8ffb70e107 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1075.666576] env[69027]: DEBUG nova.compute.manager [req-7901f6cc-9702-4db1-be53-8b78aa7857a2 req-261c16df-abfc-4947-9565-f1c519853aff service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Received event network-vif-plugged-ad41719c-1897-4542-bd65-ae8ffb70e107 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1075.666576] env[69027]: DEBUG oslo_concurrency.lockutils [req-7901f6cc-9702-4db1-be53-8b78aa7857a2 req-261c16df-abfc-4947-9565-f1c519853aff service nova] Acquiring lock "bf4c80b4-bc0c-4198-9010-74fc50707745-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.666576] env[69027]: DEBUG oslo_concurrency.lockutils 
[req-7901f6cc-9702-4db1-be53-8b78aa7857a2 req-261c16df-abfc-4947-9565-f1c519853aff service nova] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.666576] env[69027]: DEBUG oslo_concurrency.lockutils [req-7901f6cc-9702-4db1-be53-8b78aa7857a2 req-261c16df-abfc-4947-9565-f1c519853aff service nova] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.667096] env[69027]: DEBUG nova.compute.manager [req-7901f6cc-9702-4db1-be53-8b78aa7857a2 req-261c16df-abfc-4947-9565-f1c519853aff service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] No waiting events found dispatching network-vif-plugged-ad41719c-1897-4542-bd65-ae8ffb70e107 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1075.667096] env[69027]: WARNING nova.compute.manager [req-7901f6cc-9702-4db1-be53-8b78aa7857a2 req-261c16df-abfc-4947-9565-f1c519853aff service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Received unexpected event network-vif-plugged-ad41719c-1897-4542-bd65-ae8ffb70e107 for instance with vm_state building and task_state spawning. [ 1075.739310] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Successfully updated port: ad41719c-1897-4542-bd65-ae8ffb70e107 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1075.761267] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "refresh_cache-bf4c80b4-bc0c-4198-9010-74fc50707745" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.762428] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired lock "refresh_cache-bf4c80b4-bc0c-4198-9010-74fc50707745" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.762660] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1075.820969] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1076.010062] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Updating instance_info_cache with network_info: [{"id": "ad41719c-1897-4542-bd65-ae8ffb70e107", "address": "fa:16:3e:67:d3:8a", "network": {"id": "404b4355-dae6-4ce3-a75a-51ccc2099a23", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-164317621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78a13bae334a4ef7959f0d408926ca33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad41719c-18", "ovs_interfaceid": "ad41719c-1897-4542-bd65-ae8ffb70e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.029040] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Releasing lock "refresh_cache-bf4c80b4-bc0c-4198-9010-74fc50707745" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.029040] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Instance network_info: |[{"id": "ad41719c-1897-4542-bd65-ae8ffb70e107", "address": "fa:16:3e:67:d3:8a", "network": {"id": "404b4355-dae6-4ce3-a75a-51ccc2099a23", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-164317621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78a13bae334a4ef7959f0d408926ca33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad41719c-18", "ovs_interfaceid": "ad41719c-1897-4542-bd65-ae8ffb70e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1076.029548] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:d3:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24376631-ee89-4ff1-b8ac-f09911fc8329', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad41719c-1897-4542-bd65-ae8ffb70e107', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.037479] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating folder: Project (78a13bae334a4ef7959f0d408926ca33). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1076.038097] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c336612e-416f-4467-9506-e7055d23c0d2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.051584] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Created folder: Project (78a13bae334a4ef7959f0d408926ca33) in parent group-v677321. [ 1076.051753] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating folder: Instances. Parent ref: group-v677379. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1076.051982] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26740837-d923-4e73-8470-321ea639928e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.061032] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Created folder: Instances in parent group-v677379. [ 1076.061306] env[69027]: DEBUG oslo.service.loopingcall [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1076.061556] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1076.061798] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5747e83-d35a-4cd5-8c1e-efb3152a51b1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.082276] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.082276] env[69027]: value = "task-3395150" [ 1076.082276] env[69027]: _type = "Task" [ 1076.082276] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.088949] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395150, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.590874] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395150, 'name': CreateVM_Task, 'duration_secs': 0.278872} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.591070] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1076.591823] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.591991] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.592339] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1076.592600] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2437e465-7257-45ba-ac98-ba1d421eb4b2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.597262] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1076.597262] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]529748bf-c917-42a2-ac61-9dfa4b4ca439" [ 1076.597262] env[69027]: _type = "Task" [ 1076.597262] env[69027]: } to 
complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.613111] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.613111] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1076.614192] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1076.618019] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]529748bf-c917-42a2-ac61-9dfa4b4ca439, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.640659] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.640807] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.640934] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641069] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641190] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641309] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641452] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641573] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641689] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641803] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1076.641917] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1076.642448] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.642629] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.771604] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.111952] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1077.112198] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1077.113046] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.805382] env[69027]: DEBUG nova.compute.manager [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 
req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Received event network-changed-ad41719c-1897-4542-bd65-ae8ffb70e107 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1077.805986] env[69027]: DEBUG nova.compute.manager [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Refreshing instance network info cache due to event network-changed-ad41719c-1897-4542-bd65-ae8ffb70e107. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1077.807051] env[69027]: DEBUG oslo_concurrency.lockutils [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] Acquiring lock "refresh_cache-bf4c80b4-bc0c-4198-9010-74fc50707745" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.807051] env[69027]: DEBUG oslo_concurrency.lockutils [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] Acquired lock "refresh_cache-bf4c80b4-bc0c-4198-9010-74fc50707745" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.807051] env[69027]: DEBUG nova.network.neutron [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Refreshing network info cache for port ad41719c-1897-4542-bd65-ae8ffb70e107 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1078.325106] env[69027]: DEBUG nova.network.neutron [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Updated VIF entry in instance network info cache for port ad41719c-1897-4542-bd65-ae8ffb70e107. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1078.325106] env[69027]: DEBUG nova.network.neutron [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Updating instance_info_cache with network_info: [{"id": "ad41719c-1897-4542-bd65-ae8ffb70e107", "address": "fa:16:3e:67:d3:8a", "network": {"id": "404b4355-dae6-4ce3-a75a-51ccc2099a23", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-164317621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78a13bae334a4ef7959f0d408926ca33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad41719c-18", "ovs_interfaceid": "ad41719c-1897-4542-bd65-ae8ffb70e107", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.336137] env[69027]: DEBUG oslo_concurrency.lockutils [req-75b2d517-116a-419b-a50d-ccb3b5f5f394 req-bbc48dc4-97f9-4106-9e7c-79a5f7db9dce service nova] Releasing lock "refresh_cache-bf4c80b4-bc0c-4198-9010-74fc50707745" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.770867] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.771145] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.771320] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.771509] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1080.176387] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "edc3a0ff-c592-47b8-9753-1b4831bee576" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.176674] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "edc3a0ff-c592-47b8-9753-1b4831bee576" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.062045] env[69027]: DEBUG oslo_concurrency.lockutils [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "bf4c80b4-bc0c-4198-9010-74fc50707745" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.018865] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Acquiring lock "54340994-037e-4255-b32b-18d8784733c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.019145] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "54340994-037e-4255-b32b-18d8784733c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.778468] env[69027]: WARNING oslo_vmware.rw_handles [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = 
self._read_status() [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1119.778468] env[69027]: ERROR oslo_vmware.rw_handles [ 1119.779379] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1119.780810] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1119.781071] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Copying Virtual Disk [datastore2] vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/b3b8ae3e-fb54-4c31-956e-f4932617d82a/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1119.781378] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-489c6936-a527-465d-8ba5-3168617aa605 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.789014] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 1119.789014] env[69027]: value = "task-3395153" [ 1119.789014] env[69027]: _type = "Task" [ 1119.789014] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.796639] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395153, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.298887] env[69027]: DEBUG oslo_vmware.exceptions [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1120.299191] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1120.299758] env[69027]: ERROR nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1120.299758] env[69027]: Faults: ['InvalidArgument'] [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Traceback (most recent call last): [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] yield resources [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self.driver.spawn(context, instance, image_meta, [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self._fetch_image_if_missing(context, vi) [ 1120.299758] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] image_cache(vi, tmp_image_ds_loc) [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] vm_util.copy_virtual_disk( [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] session._wait_for_task(vmdk_copy_task) [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] return self.wait_for_task(task_ref) [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] return evt.wait() [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] result = hub.switch() [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1120.300156] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] return self.greenlet.switch() [ 1120.300615] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1120.300615] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self.f(*self.args, **self.kw) [ 1120.300615] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1120.300615] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] raise exceptions.translate_fault(task_info.error) [ 1120.300615] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1120.300615] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Faults: ['InvalidArgument'] [ 1120.300615] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] [ 1120.300615] env[69027]: INFO nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Terminating instance [ 1120.301657] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.301867] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1120.302169] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98172eef-a712-4d1e-8ec1-dfac4060667d 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.304232] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1120.304450] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1120.305143] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3af8396-5c96-444c-8185-94ea3104b761 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.311454] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1120.311659] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8d5b6cc-aeac-48a3-9846-250e5c591cdc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.313645] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1120.313818] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1120.314733] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2b6646b-a025-47dd-8d14-f10ddb196eef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.319712] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 1120.319712] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52401083-e891-1f86-d025-7a38dd68469e" [ 1120.319712] env[69027]: _type = "Task" [ 1120.319712] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.326666] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52401083-e891-1f86-d025-7a38dd68469e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.385103] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1120.385321] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1120.385502] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleting the datastore file [datastore2] 339bab90-238a-47ab-89f5-1ff9541ec14d {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1120.385752] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21ff4c14-badc-4f99-9788-4ba8aab34dc8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.392305] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 1120.392305] env[69027]: value = "task-3395155" [ 1120.392305] env[69027]: _type = "Task" [ 1120.392305] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.399615] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395155, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.830429] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1120.830790] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating directory with path [datastore2] vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1120.830912] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98567894-62cd-4b4f-80d6-d4407d7f7dcf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.841673] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Created directory with path [datastore2] vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1120.841858] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Fetch image to [datastore2] vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1120.842065] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1120.842747] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d34643e-7e52-4fbd-9409-71035a431f92 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.848781] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae42ab5-e2c3-4f11-bbd6-04842ea9e9b2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.857311] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268c2d96-8605-4e1d-a5af-28638b247c37 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.890721] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f00f4bf-5329-43fb-ab01-53d819453c0c {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.901440] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2c68fce7-c965-43d9-81f9-d6b9dfc1910e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.903053] env[69027]: DEBUG oslo_vmware.api [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395155, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071944} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.903307] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1120.903487] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1120.903656] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1120.903923] env[69027]: INFO nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Took 0.60 seconds to destroy the instance on the hypervisor. 
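The SearchDatastore_Task / DeleteDatastoreFile_Task entries above, with their "Waiting for the task", "progress is 0%." and "completed successfully" lines, all follow oslo.vmware's generic task pattern: invoke a vSphere *_Task method through the API session, then poll the returned task object until it finishes. The sketch below is only an illustration of that pattern under assumed conditions (a reachable vCenter, placeholder host, credentials and datastore path, at least one Datacenter object); it is not Nova's actual ds_util/vm_util code.

# Illustrative sketch of the oslo.vmware "invoke *_Task, then wait" pattern
# seen in the log above. Host, credentials and the datastore path are
# placeholders, not values taken from this log.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',       # placeholder endpoint/credentials
    api_retry_count=10, task_poll_interval=0.5)

# Look up a Datacenter managed object (assumes at least one exists).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'Datacenter', 100)
dc_ref = result.objects[0].obj

# Invoke a *_Task method and block until the task completes; wait_for_task
# polls the task (the "_poll_task ... progress is 0%." lines above) and
# raises if the task ends in an error state.
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          session.vim.service_content.fileManager,
                          name='[datastore2] vmware_temp/obsolete.vmdk',  # hypothetical path
                          datacenter=dc_ref)
session.wait_for_task(task)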
[ 1120.906463] env[69027]: DEBUG nova.compute.claims [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1120.906632] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.906843] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.927620] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1120.981145] env[69027]: DEBUG oslo_vmware.rw_handles [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1121.043738] env[69027]: DEBUG oslo_vmware.rw_handles [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1121.043976] env[69027]: DEBUG oslo_vmware.rw_handles [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1121.300100] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f34694-3539-4a0c-8559-2f9b974328ea {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.306150] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d8bf53-4760-4ec0-897c-9f880fa733bf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.336285] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ea58f3-63cb-4247-bd9f-e0bb02e012de {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.343413] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521f19d2-7b49-41b1-84b1-15871c9bff7a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.356219] env[69027]: DEBUG nova.compute.provider_tree [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1121.373265] env[69027]: DEBUG nova.scheduler.client.report [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1121.392490] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.485s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1121.393100] env[69027]: ERROR nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1121.393100] env[69027]: Faults: ['InvalidArgument'] [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Traceback (most recent call last): [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1121.393100] 
env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self.driver.spawn(context, instance, image_meta, [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self._fetch_image_if_missing(context, vi) [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] image_cache(vi, tmp_image_ds_loc) [ 1121.393100] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] vm_util.copy_virtual_disk( [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] session._wait_for_task(vmdk_copy_task) [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] return self.wait_for_task(task_ref) [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] return evt.wait() [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] result = hub.switch() [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] return self.greenlet.switch() [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1121.393427] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] self.f(*self.args, **self.kw) [ 1121.393737] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1121.393737] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] raise exceptions.translate_fault(task_info.error) [ 1121.393737] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1121.393737] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Faults: ['InvalidArgument'] [ 1121.393737] env[69027]: ERROR nova.compute.manager [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] [ 1121.393872] env[69027]: DEBUG nova.compute.utils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1121.395223] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Build of instance 339bab90-238a-47ab-89f5-1ff9541ec14d was re-scheduled: A specified parameter was not correct: fileType [ 1121.395223] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1121.395607] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1121.395807] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1121.395985] env[69027]: DEBUG nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1121.396164] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1121.851446] env[69027]: DEBUG nova.network.neutron [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.879435] env[69027]: INFO nova.compute.manager [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Took 0.48 seconds to deallocate network for instance. [ 1122.045903] env[69027]: INFO nova.scheduler.client.report [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleted allocations for instance 339bab90-238a-47ab-89f5-1ff9541ec14d [ 1122.086408] env[69027]: DEBUG oslo_concurrency.lockutils [None req-156cc9f8-810b-4864-9358-f746f22c1d1d tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 526.991s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.087626] env[69027]: DEBUG oslo_concurrency.lockutils [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 127.620s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.087873] env[69027]: DEBUG oslo_concurrency.lockutils [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "339bab90-238a-47ab-89f5-1ff9541ec14d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.088095] env[69027]: DEBUG oslo_concurrency.lockutils [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.088269] env[69027]: DEBUG oslo_concurrency.lockutils [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.090632] env[69027]: INFO nova.compute.manager [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Terminating instance [ 1122.092397] env[69027]: DEBUG nova.compute.manager [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1122.092552] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1122.093034] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4913960-05f5-47a7-9776-e4fc7a0850f7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.101892] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae31a0a-a17e-44ec-b84c-508fe27f9e8d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.113042] env[69027]: DEBUG nova.compute.manager [None req-c6136b5c-beae-41ce-bb21-008b9561b9c8 tempest-ImagesOneServerNegativeTestJSON-1086940354 tempest-ImagesOneServerNegativeTestJSON-1086940354-project-member] [instance: 84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1122.134419] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 339bab90-238a-47ab-89f5-1ff9541ec14d could not be found. 
[ 1122.134628] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1122.134810] env[69027]: INFO nova.compute.manager [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1122.135327] env[69027]: DEBUG oslo.service.loopingcall [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.135405] env[69027]: DEBUG nova.compute.manager [-] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1122.135486] env[69027]: DEBUG nova.network.neutron [-] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1122.156065] env[69027]: DEBUG nova.compute.manager [None req-c6136b5c-beae-41ce-bb21-008b9561b9c8 tempest-ImagesOneServerNegativeTestJSON-1086940354 tempest-ImagesOneServerNegativeTestJSON-1086940354-project-member] [instance: 84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1122.182294] env[69027]: DEBUG nova.network.neutron [-] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.187246] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c6136b5c-beae-41ce-bb21-008b9561b9c8 tempest-ImagesOneServerNegativeTestJSON-1086940354 tempest-ImagesOneServerNegativeTestJSON-1086940354-project-member] Lock "84e96a3a-fbb3-4a54-b5c2-c0c463b9e8bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.496s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.191456] env[69027]: INFO nova.compute.manager [-] [instance: 339bab90-238a-47ab-89f5-1ff9541ec14d] Took 0.06 seconds to deallocate network for instance. [ 1122.315514] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Starting instance... 
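The WARNING followed immediately by "Instance destroyed" above reflects a destroy path that treats an instance already missing on the backend as successfully destroyed. A small sketch of that tolerance, using a local stand-in for nova.exception.InstanceNotFound:

    # Hedged sketch: destroy that shrugs off "already gone on the backend".
    class InstanceNotFound(Exception):
        pass


    def destroy(instance_uuid, backend_lookup):
        try:
            vm_ref = backend_lookup(instance_uuid)
        except InstanceNotFound:
            print(f"Instance does not exist on backend: {instance_uuid}")
            return  # nothing to tear down; treat as destroyed
        print(f"Destroying backend VM {vm_ref}")


    def lookup(uuid):
        raise InstanceNotFound(uuid)


    destroy("339bab90-238a-47ab-89f5-1ff9541ec14d", lookup)
    print("Instance destroyed")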
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1122.415676] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.415943] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.417439] env[69027]: INFO nova.compute.claims [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1122.470859] env[69027]: DEBUG oslo_concurrency.lockutils [None req-982bed29-1651-4279-a43d-de20b8b30dea tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "339bab90-238a-47ab-89f5-1ff9541ec14d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.383s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.734515] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a71c5a-33e3-4d36-9755-e651f899900c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.742266] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f711c1-8390-4b3c-9fa4-a18332b17b2c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.771292] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8551a5-ba21-4d3c-aa4b-4b20f66aae0a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.778172] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57215549-8771-4b2b-adb8-f545aafd7eef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.791214] env[69027]: DEBUG nova.compute.provider_tree [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.801893] env[69027]: DEBUG nova.scheduler.client.report [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1122.817517] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.818040] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1122.867798] env[69027]: DEBUG nova.compute.utils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1122.869129] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1122.869408] env[69027]: DEBUG nova.network.neutron [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1122.895673] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Start building block device mappings for instance. 
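The inventory dict reported above is what placement schedules against: capacity per resource class is (total - reserved) * allocation_ratio, bounded per instance by max_unit. A short sketch reproducing that arithmetic from the logged values:

    # Hedged sketch: effective capacity implied by the logged inventory.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0,
                      "min_unit": 1, "max_unit": 16,    "step_size": 1},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0,
                      "min_unit": 1, "max_unit": 65530, "step_size": 1},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0,
                      "min_unit": 1, "max_unit": 102,   "step_size": 1},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: capacity={capacity:.0f} (max {inv['max_unit']} per instance)")
    # VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400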
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1122.989141] env[69027]: DEBUG nova.policy [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec4410b37a464672bb924564b24a02c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08b65d39d1924efc9d2dbd4ed09e43e7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1122.992714] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1123.037619] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:40:51Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='961db9ab-6400-4e79-bdd5-740f24af337b',id=38,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1444339613',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1123.037874] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1123.038042] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1123.038226] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1123.038373] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1123.038519] 
env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1123.038726] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1123.038882] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1123.039059] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1123.039225] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1123.039399] env[69027]: DEBUG nova.virt.hardware [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1123.040355] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c2d75f-efe1-4280-9b89-24a5a5d361dd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.048890] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a278f91-9e0c-4429-adae-99c2a934f1af {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.457118] env[69027]: DEBUG nova.network.neutron [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Successfully created port: 5ab94873-51d3-401f-a34d-b1e6934343c6 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1124.211014] env[69027]: DEBUG nova.compute.manager [req-f6837774-8f04-4c4d-a56f-f8d38cb51920 req-534811af-4f36-437c-8f41-83f7e585dced service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Received event network-vif-plugged-5ab94873-51d3-401f-a34d-b1e6934343c6 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1124.211014] env[69027]: DEBUG oslo_concurrency.lockutils [req-f6837774-8f04-4c4d-a56f-f8d38cb51920 req-534811af-4f36-437c-8f41-83f7e585dced service nova] Acquiring 
lock "90a6375b-4834-406d-abd5-5cf47b7cfc12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.211014] env[69027]: DEBUG oslo_concurrency.lockutils [req-f6837774-8f04-4c4d-a56f-f8d38cb51920 req-534811af-4f36-437c-8f41-83f7e585dced service nova] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.211014] env[69027]: DEBUG oslo_concurrency.lockutils [req-f6837774-8f04-4c4d-a56f-f8d38cb51920 req-534811af-4f36-437c-8f41-83f7e585dced service nova] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.211342] env[69027]: DEBUG nova.compute.manager [req-f6837774-8f04-4c4d-a56f-f8d38cb51920 req-534811af-4f36-437c-8f41-83f7e585dced service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] No waiting events found dispatching network-vif-plugged-5ab94873-51d3-401f-a34d-b1e6934343c6 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1124.211342] env[69027]: WARNING nova.compute.manager [req-f6837774-8f04-4c4d-a56f-f8d38cb51920 req-534811af-4f36-437c-8f41-83f7e585dced service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Received unexpected event network-vif-plugged-5ab94873-51d3-401f-a34d-b1e6934343c6 for instance with vm_state building and task_state spawning. [ 1124.290805] env[69027]: DEBUG nova.network.neutron [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Successfully updated port: 5ab94873-51d3-401f-a34d-b1e6934343c6 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1124.328809] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "refresh_cache-90a6375b-4834-406d-abd5-5cf47b7cfc12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1124.329327] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired lock "refresh_cache-90a6375b-4834-406d-abd5-5cf47b7cfc12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.329568] env[69027]: DEBUG nova.network.neutron [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1124.372349] env[69027]: DEBUG nova.network.neutron [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1124.593089] env[69027]: DEBUG nova.network.neutron [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Updating instance_info_cache with network_info: [{"id": "5ab94873-51d3-401f-a34d-b1e6934343c6", "address": "fa:16:3e:34:83:18", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.67", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ab94873-51", "ovs_interfaceid": "5ab94873-51d3-401f-a34d-b1e6934343c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.611055] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Releasing lock "refresh_cache-90a6375b-4834-406d-abd5-5cf47b7cfc12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1124.611371] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Instance network_info: |[{"id": "5ab94873-51d3-401f-a34d-b1e6934343c6", "address": "fa:16:3e:34:83:18", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.67", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ab94873-51", "ovs_interfaceid": "5ab94873-51d3-401f-a34d-b1e6934343c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1124.611784] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:83:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78fd2e0c-4fd2-4d81-8780-aa94237670c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5ab94873-51d3-401f-a34d-b1e6934343c6', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1124.620514] env[69027]: DEBUG oslo.service.loopingcall [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1124.621340] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1124.621616] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d5dc6ea-c06b-43f6-a7d9-4acf831d72c3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.646022] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1124.646022] env[69027]: value = "task-3395156" [ 1124.646022] env[69027]: _type = "Task" [ 1124.646022] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.654452] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395156, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.156890] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395156, 'name': CreateVM_Task, 'duration_secs': 0.280749} completed successfully. 
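The network_info cached for 90a6375b-... above carries one OVS VIF with a single fixed IP. A small sketch showing how the useful fields (device name, MAC, addresses) can be pulled out of that structure; the literal below is a trimmed copy of the logged entry:

    # Hedged sketch: extracting fields from a Nova network_info-style structure.
    network_info = [{
        "id": "5ab94873-51d3-401f-a34d-b1e6934343c6",
        "address": "fa:16:3e:34:83:18",
        "type": "ovs",
        "devname": "tap5ab94873-51",
        "network": {
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "gateway": {"address": "192.168.233.1"},
                "ips": [{"address": "192.168.233.67", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(f'{vif["devname"]} ({vif["type"]}) mac={vif["address"]} ips={ips}')
    # tap5ab94873-51 (ovs) mac=fa:16:3e:34:83:18 ips=['192.168.233.67']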
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.157423] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1125.157876] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1125.158669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.158669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1125.158669] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ad57304-f9dd-40c9-a27f-d35b708e7b6d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.163974] env[69027]: DEBUG oslo_vmware.api [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for the task: (returnval){ [ 1125.163974] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5257065b-83b2-c433-2fad-d835dbc4f868" [ 1125.163974] env[69027]: _type = "Task" [ 1125.163974] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.173255] env[69027]: DEBUG oslo_vmware.api [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5257065b-83b2-c433-2fad-d835dbc4f868, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.675585] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1125.676368] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1125.676730] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.235625] env[69027]: DEBUG nova.compute.manager [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Received event network-changed-5ab94873-51d3-401f-a34d-b1e6934343c6 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1126.235826] env[69027]: DEBUG nova.compute.manager [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Refreshing instance network info cache due to event network-changed-5ab94873-51d3-401f-a34d-b1e6934343c6. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1126.236286] env[69027]: DEBUG oslo_concurrency.lockutils [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] Acquiring lock "refresh_cache-90a6375b-4834-406d-abd5-5cf47b7cfc12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1126.236536] env[69027]: DEBUG oslo_concurrency.lockutils [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] Acquired lock "refresh_cache-90a6375b-4834-406d-abd5-5cf47b7cfc12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.236766] env[69027]: DEBUG nova.network.neutron [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Refreshing network info cache for port 5ab94873-51d3-401f-a34d-b1e6934343c6 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1127.535591] env[69027]: DEBUG nova.network.neutron [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Updated VIF entry in instance network info cache for port 5ab94873-51d3-401f-a34d-b1e6934343c6. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1127.535957] env[69027]: DEBUG nova.network.neutron [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Updating instance_info_cache with network_info: [{"id": "5ab94873-51d3-401f-a34d-b1e6934343c6", "address": "fa:16:3e:34:83:18", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.67", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5ab94873-51", "ovs_interfaceid": "5ab94873-51d3-401f-a34d-b1e6934343c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.545563] env[69027]: DEBUG oslo_concurrency.lockutils [req-84cc08cd-7e64-44ea-9938-359780e09616 req-5b308b2c-3dae-4f80-8bd0-3f5d30625096 service nova] Releasing lock "refresh_cache-90a6375b-4834-406d-abd5-5cf47b7cfc12" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1127.951952] env[69027]: DEBUG oslo_concurrency.lockutils [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "4ed95b65-233e-406e-8d27-2a5cd2694184" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.359703] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.359939] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1131.767110] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.771824] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1132.790846] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1132.791159] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1132.791361] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1132.791523] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1132.792726] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b622a5a3-afd1-436e-b4fd-f1bb5caf8ca9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.801432] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef6864a-ba15-45f4-8235-9b31c70615e0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.814940] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f596c9f4-8e70-4f2c-a8f9-90a2a1a5452f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.821247] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cca8319-8c96-48b2-ae7f-6734f44a7750 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.851150] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180959MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1132.851302] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1132.851494] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.143431] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b6a38a84-0b95-494c-a423-3360824ed8d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.143610] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.143739] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.143861] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.143981] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.144115] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.144234] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.144353] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.144467] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.144602] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1133.155920] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.166441] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.175711] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 5401d655-86da-41b0-9d29-3ba25d21f1ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.186217] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 16b7e547-8dc8-4305-8ff6-64736fc9cbb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.196962] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f4d49026-8558-44c7-b475-215eecba4e09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.208882] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fb7d7808-9768-4882-9405-0d07c41509fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.221895] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c39e4257-7526-461e-ad95-91defe4d51ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.231167] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e6224db8-1a05-4832-95be-7231fda105f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.243884] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03e2d14d-9195-4ee5-b2e0-05b803dcfefc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.296588] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 45296842-f415-42eb-b67e-096465650c09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.311066] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 34a1076e-6a17-442d-8a71-1d49117edad5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.329346] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.341744] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 54340994-037e-4255-b32b-18d8784733c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.354499] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1133.354771] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1133.354919] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1133.686164] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0b7c2d-3e03-4d94-a27c-a4fba75dac16 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.693978] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabb547d-49de-43de-bf9d-8b8f9ac733af {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.723736] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2809dc0-e6be-4998-aaf4-7eba31594f2b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.731076] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4baa5c0-33f6-4f0f-9f36-9b01d035154e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.744051] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.753165] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1133.770238] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1133.770447] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.919s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.770742] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.770895] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances with incomplete migration {{(pid=69027) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1136.777584] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.777860] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1136.777907] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1136.800025] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800025] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800025] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800025] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Skipping network cache update for instance because it is Building. 
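The resource audit a few entries above ("Final resource view ... used_ram=1856MB ... used_disk=10GB ... used_vcpus=10") follows directly from the 512 MB reservation plus the ten actively managed instances listed by the tracker (nine 128 MB flavors and one 192 MB flavor, each with 1 vCPU and 1 GB of disk). A short check of that arithmetic:

    # Hedged sketch: reproducing the resource-tracker usage numbers above.
    reserved_mb = 512
    instance_mem_mb = [128, 192, 128, 128, 128, 128, 128, 128, 128, 128]
    instance_disk_gb = [1] * 10
    instance_vcpus = [1] * 10

    used_ram = reserved_mb + sum(instance_mem_mb)
    used_disk = sum(instance_disk_gb)
    used_vcpus = sum(instance_vcpus)
    print(f"used_ram={used_ram}MB used_disk={used_disk}GB used_vcpus={used_vcpus}")
    # -> used_ram=1856MB used_disk=10GB used_vcpus=10, matching the final
    #    resource view logged by the tracker.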
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800025] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800308] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800308] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800384] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800462] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800574] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1136.800687] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1136.801192] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1136.801365] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.770915] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1137.771211] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.776576] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.771578] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.771812] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.771987] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.772145] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1141.772518] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.772518] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1141.780240] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] There are 0 instances to clean {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1142.032864] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.052616] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Getting list of instances from cluster (obj){ [ 1142.052616] env[69027]: value = "domain-c8" [ 1142.052616] env[69027]: _type = "ClusterComputeResource" [ 1142.052616] env[69027]: } {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1142.054870] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25c38a7-687c-4a78-82a3-203a9bbcc374 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.071438] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Got total of 10 instances {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1142.071606] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid b6a38a84-0b95-494c-a423-3360824ed8d3 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.071799] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.071957] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.072122] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.072273] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 4ed95b65-233e-406e-8d27-2a5cd2694184 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.072455] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid fbd6a238-1662-4c22-86ab-d31d4bb82734 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.072611] 
env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.072756] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid c099867e-d9e3-43a4-b2cb-568270d4aa6b {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.072900] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid bf4c80b4-bc0c-4198-9010-74fc50707745 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.073055] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 90a6375b-4834-406d-abd5-5cf47b7cfc12 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1142.073385] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "b6a38a84-0b95-494c-a423-3360824ed8d3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.073669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.073882] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.074098] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.074300] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "4ed95b65-233e-406e-8d27-2a5cd2694184" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.074494] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.074690] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.074875] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.075079] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "bf4c80b4-bc0c-4198-9010-74fc50707745" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.075496] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1169.422299] env[69027]: WARNING oslo_vmware.rw_handles [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1169.422299] env[69027]: ERROR oslo_vmware.rw_handles [ 1169.422967] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1169.425515] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 
tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1169.425643] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Copying Virtual Disk [datastore2] vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/6f83f5b5-01d3-46fc-9357-6247d6099387/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1169.425879] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-749033da-bebb-443f-898b-edb05b504037 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.434240] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 1169.434240] env[69027]: value = "task-3395157" [ 1169.434240] env[69027]: _type = "Task" [ 1169.434240] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.442051] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': task-3395157, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.945289] env[69027]: DEBUG oslo_vmware.exceptions [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1169.945574] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1169.946146] env[69027]: ERROR nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1169.946146] env[69027]: Faults: ['InvalidArgument'] [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Traceback (most recent call last): [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] yield resources [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self.driver.spawn(context, instance, image_meta, [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self._fetch_image_if_missing(context, vi) [ 1169.946146] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] image_cache(vi, tmp_image_ds_loc) [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] vm_util.copy_virtual_disk( [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] session._wait_for_task(vmdk_copy_task) [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] return self.wait_for_task(task_ref) [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] return evt.wait() [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] result = hub.switch() [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1169.946681] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] return self.greenlet.switch() [ 1169.947133] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1169.947133] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self.f(*self.args, **self.kw) [ 1169.947133] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1169.947133] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] raise exceptions.translate_fault(task_info.error) [ 1169.947133] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1169.947133] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Faults: ['InvalidArgument'] [ 1169.947133] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] [ 1169.947133] env[69027]: INFO nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Terminating instance [ 1169.948878] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.949153] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1169.949787] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] 
[instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1169.949978] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1169.950219] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3fa76ae-7217-4045-b350-84518233c5d7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.952521] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ee180a-011a-4cbf-999e-7088f1516f25 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.959754] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1169.959988] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e63c80c-bb9d-4369-86e8-86bc8af8a195 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.962162] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1169.962342] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1169.963315] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d7d944d-7c02-4364-ad0f-a6891be68e32 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.968092] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 1169.968092] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52af9946-d99e-0cc5-8917-2ffce1cd298e" [ 1169.968092] env[69027]: _type = "Task" [ 1169.968092] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.976414] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52af9946-d99e-0cc5-8917-2ffce1cd298e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.030050] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1170.030214] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1170.030363] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Deleting the datastore file [datastore2] b6a38a84-0b95-494c-a423-3360824ed8d3 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1170.030657] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cf38475-4a4a-40af-9712-fc9ea4d94452 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.036934] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for the task: (returnval){ [ 1170.036934] env[69027]: value = "task-3395159" [ 1170.036934] env[69027]: _type = "Task" [ 1170.036934] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.044299] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': task-3395159, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.479352] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1170.479686] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating directory with path [datastore2] vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1170.479843] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d273ba3f-43b0-4711-a29c-491415edd59a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.491146] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Created directory with path [datastore2] vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1170.491351] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Fetch image to [datastore2] vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1170.491518] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1170.492296] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70d49e9-200e-4721-be07-be693005c9fd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.499064] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654217ed-0dbb-4b35-9703-9e5d19326a1e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.509123] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c71ae77-7ce1-4fac-8e44-df72e69db643 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.541733] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b28bc625-8442-47d1-bc7f-63990eadf15d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.549036] env[69027]: DEBUG oslo_vmware.api [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Task: {'id': task-3395159, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079052} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.550056] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1170.550256] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1170.550428] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1170.550598] env[69027]: INFO nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1170.553006] env[69027]: DEBUG nova.compute.claims [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1170.553199] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1170.553427] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1170.556690] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f8396e48-e435-464d-afea-9652b21b1e6b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.577224] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1170.617692] env[69027]: DEBUG nova.scheduler.client.report [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Refreshing inventories for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1170.631604] env[69027]: DEBUG oslo_vmware.rw_handles [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1170.634296] env[69027]: DEBUG nova.scheduler.client.report [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Updating ProviderTree inventory for provider 4923c91f-3b2b-4ad1-a821-36209acae639 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1170.634507] env[69027]: DEBUG nova.compute.provider_tree [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.694701] env[69027]: DEBUG oslo_vmware.rw_handles [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1170.694910] env[69027]: DEBUG oslo_vmware.rw_handles [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1170.700052] env[69027]: DEBUG nova.scheduler.client.report [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Refreshing aggregate associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, aggregates: None {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1170.718940] env[69027]: DEBUG nova.scheduler.client.report [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Refreshing trait associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1170.976053] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c31044f-3af2-4292-b3bb-e538afa46388 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.982207] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85e9d4b-56c4-48ac-8841-37f96dd5821b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.011685] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f695ecb-8662-4ea3-83bd-e0f74b870e19 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.018831] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b046f121-3207-4345-a641-9c1411f5d746 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.031486] env[69027]: DEBUG nova.compute.provider_tree [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.045019] env[69027]: DEBUG nova.scheduler.client.report [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1171.061480] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" 
:: held 0.508s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.061997] env[69027]: ERROR nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1171.061997] env[69027]: Faults: ['InvalidArgument'] [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Traceback (most recent call last): [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self.driver.spawn(context, instance, image_meta, [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self._fetch_image_if_missing(context, vi) [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] image_cache(vi, tmp_image_ds_loc) [ 1171.061997] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] vm_util.copy_virtual_disk( [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] session._wait_for_task(vmdk_copy_task) [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] return self.wait_for_task(task_ref) [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] return evt.wait() [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1171.062417] env[69027]: ERROR 
nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] result = hub.switch() [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] return self.greenlet.switch() [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1171.062417] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] self.f(*self.args, **self.kw) [ 1171.062845] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1171.062845] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] raise exceptions.translate_fault(task_info.error) [ 1171.062845] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1171.062845] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Faults: ['InvalidArgument'] [ 1171.062845] env[69027]: ERROR nova.compute.manager [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] [ 1171.062845] env[69027]: DEBUG nova.compute.utils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1171.064554] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Build of instance b6a38a84-0b95-494c-a423-3360824ed8d3 was re-scheduled: A specified parameter was not correct: fileType [ 1171.064554] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1171.064936] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1171.065130] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1171.065306] env[69027]: DEBUG nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1171.065471] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1171.426897] env[69027]: DEBUG nova.network.neutron [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.438011] env[69027]: INFO nova.compute.manager [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Took 0.37 seconds to deallocate network for instance. [ 1171.531012] env[69027]: INFO nova.scheduler.client.report [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Deleted allocations for instance b6a38a84-0b95-494c-a423-3360824ed8d3 [ 1171.551144] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aa71c876-5878-4a1c-93cd-4d0105c5c9f4 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 573.372s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.552294] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 374.016s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.552516] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Acquiring lock "b6a38a84-0b95-494c-a423-3360824ed8d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.552720] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.552980] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.555150] env[69027]: INFO nova.compute.manager [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Terminating instance [ 1171.556798] env[69027]: DEBUG nova.compute.manager [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1171.557449] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1171.557562] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf260325-0330-4b85-b39d-4e83d2a61212 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.566810] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079e921d-ae80-435e-bb9f-1359ae9c6f28 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.576993] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1171.597757] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6a38a84-0b95-494c-a423-3360824ed8d3 could not be found. [ 1171.597941] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1171.598139] env[69027]: INFO nova.compute.manager [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1171.598382] env[69027]: DEBUG oslo.service.loopingcall [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1171.598592] env[69027]: DEBUG nova.compute.manager [-] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1171.598686] env[69027]: DEBUG nova.network.neutron [-] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1171.622413] env[69027]: DEBUG nova.network.neutron [-] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.629344] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.629706] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.631108] env[69027]: INFO nova.compute.claims [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1171.635337] env[69027]: INFO nova.compute.manager [-] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] Took 0.04 seconds to deallocate network for instance. [ 1171.737154] env[69027]: DEBUG oslo_concurrency.lockutils [None req-7ceca769-5dc8-41d9-b15f-897369860722 tempest-ServersAdminTestJSON-1591918510 tempest-ServersAdminTestJSON-1591918510-project-member] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.737634] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 29.664s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.737837] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b6a38a84-0b95-494c-a423-3360824ed8d3] During sync_power_state the instance has a pending task (deleting). Skip. 
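[Editor's note] The lock traces above (emitted by oslo_concurrency's lockutils wrapper, per the `lockutils.py` suffixes on these lines) show two workers serializing on the instance UUID: the periodic `_sync_power_states` task waited 29.664 s for the lock held by the build/terminate path, acquired it, saw a pending task_state of "deleting", and skipped its work. A minimal sketch of that per-instance serialization pattern, using the standard library's `threading.Lock` as a stand-in for lockutils and a hypothetical `get_task_state` helper:

```python
import threading
import time

# One lock per instance UUID, standing in for lockutils' named-lock registry.
_instance_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


def _lock_for(instance_uuid: str) -> threading.Lock:
    with _registry_guard:
        return _instance_locks.setdefault(instance_uuid, threading.Lock())


def query_power_state_and_sync(instance_uuid: str, get_task_state) -> None:
    """Periodic power-state sync, serialized per instance.

    `get_task_state` is a hypothetical callable returning the instance's
    current task_state (e.g. 'deleting') or None.
    """
    lock = _lock_for(instance_uuid)
    started = time.monotonic()
    with lock:  # blocks while terminate/build holds the same instance lock
        waited = time.monotonic() - started
        print(f"lock acquired after waiting {waited:.3f}s")
        if get_task_state(instance_uuid) is not None:
            # Another operation (e.g. deleting) is still in flight: skip,
            # matching the "pending task (deleting). Skip." entry above.
            return
        # ... otherwise compare the driver power state with the DB record ...
```

In the real service the "acquired ... waited Ns" / "released ... held Ns" messages seen throughout this log are produced by that lockutils wrapper rather than by application code.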
[ 1171.738030] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "b6a38a84-0b95-494c-a423-3360824ed8d3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.958579] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d8913a-1682-41c8-9137-d90ef4bb6409 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.965971] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afeea0bb-eabe-4e3c-a9c0-7923c1228df5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.995319] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4514937-74b3-4e1c-b905-9f25299a124e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.004037] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff873a78-945a-46ef-aac2-88e8244392d5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.016167] env[69027]: DEBUG nova.compute.provider_tree [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.024712] env[69027]: DEBUG nova.scheduler.client.report [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1172.039861] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.410s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1172.040356] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1172.076919] env[69027]: DEBUG nova.compute.utils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1172.078150] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1172.078785] env[69027]: DEBUG nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1172.087904] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1172.146334] env[69027]: DEBUG nova.policy [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '56037359ca2045c4aac48662dfd7477a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '887eab30aaec49068e3ddf5d768a313c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1172.149357] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1172.173571] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1172.173814] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1172.174012] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1172.174223] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1172.174374] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1172.174520] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1172.174727] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1172.174884] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1172.175063] 
env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1172.175234] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1172.175401] env[69027]: DEBUG nova.virt.hardware [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1172.176235] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad1a1b0-182d-4b68-ba1e-d1eccafed841 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.184024] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f743e0-685f-4c14-bbcf-8495bfcb6d11 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.489553] env[69027]: DEBUG nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Successfully created port: a2bc1d43-dd5e-4fec-8678-dbab4148063d {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.199643] env[69027]: DEBUG nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Successfully updated port: a2bc1d43-dd5e-4fec-8678-dbab4148063d {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1173.213214] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "refresh_cache-a907f1ab-3540-4bc0-8389-005233cca940" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1173.213214] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired lock "refresh_cache-a907f1ab-3540-4bc0-8389-005233cca940" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.213214] env[69027]: DEBUG nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1173.251640] env[69027]: DEBUG 
nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1173.492434] env[69027]: DEBUG nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Updating instance_info_cache with network_info: [{"id": "a2bc1d43-dd5e-4fec-8678-dbab4148063d", "address": "fa:16:3e:03:d7:06", "network": {"id": "1a9f38e8-1c72-43ff-9c4d-cde922e23d6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-938644940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887eab30aaec49068e3ddf5d768a313c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2bc1d43-dd", "ovs_interfaceid": "a2bc1d43-dd5e-4fec-8678-dbab4148063d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.502524] env[69027]: DEBUG nova.compute.manager [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Received event network-vif-plugged-a2bc1d43-dd5e-4fec-8678-dbab4148063d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1173.502743] env[69027]: DEBUG oslo_concurrency.lockutils [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] Acquiring lock "a907f1ab-3540-4bc0-8389-005233cca940-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1173.503010] env[69027]: DEBUG oslo_concurrency.lockutils [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] Lock "a907f1ab-3540-4bc0-8389-005233cca940-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.503460] env[69027]: DEBUG oslo_concurrency.lockutils [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] Lock "a907f1ab-3540-4bc0-8389-005233cca940-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1173.503460] env[69027]: DEBUG 
nova.compute.manager [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] No waiting events found dispatching network-vif-plugged-a2bc1d43-dd5e-4fec-8678-dbab4148063d {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1173.503719] env[69027]: WARNING nova.compute.manager [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Received unexpected event network-vif-plugged-a2bc1d43-dd5e-4fec-8678-dbab4148063d for instance with vm_state building and task_state spawning. [ 1173.503719] env[69027]: DEBUG nova.compute.manager [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Received event network-changed-a2bc1d43-dd5e-4fec-8678-dbab4148063d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1173.503829] env[69027]: DEBUG nova.compute.manager [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Refreshing instance network info cache due to event network-changed-a2bc1d43-dd5e-4fec-8678-dbab4148063d. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1173.504523] env[69027]: DEBUG oslo_concurrency.lockutils [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] Acquiring lock "refresh_cache-a907f1ab-3540-4bc0-8389-005233cca940" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1173.506185] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Releasing lock "refresh_cache-a907f1ab-3540-4bc0-8389-005233cca940" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1173.506458] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Instance network_info: |[{"id": "a2bc1d43-dd5e-4fec-8678-dbab4148063d", "address": "fa:16:3e:03:d7:06", "network": {"id": "1a9f38e8-1c72-43ff-9c4d-cde922e23d6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-938644940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887eab30aaec49068e3ddf5d768a313c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2bc1d43-dd", "ovs_interfaceid": "a2bc1d43-dd5e-4fec-8678-dbab4148063d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1173.506722] env[69027]: DEBUG oslo_concurrency.lockutils [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] Acquired lock "refresh_cache-a907f1ab-3540-4bc0-8389-005233cca940" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.506918] env[69027]: DEBUG nova.network.neutron [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Refreshing network info cache for port a2bc1d43-dd5e-4fec-8678-dbab4148063d {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1173.508010] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:d7:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '257e5ea7-8b80-4301-9900-a754f1fe2031', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2bc1d43-dd5e-4fec-8678-dbab4148063d', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.515730] env[69027]: DEBUG oslo.service.loopingcall [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1173.518716] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1173.519196] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcb87faa-c656-44ad-947d-0823d7c27d99 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.539773] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.539773] env[69027]: value = "task-3395160" [ 1173.539773] env[69027]: _type = "Task" [ 1173.539773] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.547928] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395160, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.026778] env[69027]: DEBUG nova.network.neutron [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Updated VIF entry in instance network info cache for port a2bc1d43-dd5e-4fec-8678-dbab4148063d. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1174.027168] env[69027]: DEBUG nova.network.neutron [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Updating instance_info_cache with network_info: [{"id": "a2bc1d43-dd5e-4fec-8678-dbab4148063d", "address": "fa:16:3e:03:d7:06", "network": {"id": "1a9f38e8-1c72-43ff-9c4d-cde922e23d6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-938644940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "887eab30aaec49068e3ddf5d768a313c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2bc1d43-dd", "ovs_interfaceid": "a2bc1d43-dd5e-4fec-8678-dbab4148063d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.039875] env[69027]: DEBUG oslo_concurrency.lockutils [req-00a51108-760a-47e4-a242-86faae4af802 req-2df22a23-7787-47a4-97e3-aeb9ed2622db service nova] Releasing lock "refresh_cache-a907f1ab-3540-4bc0-8389-005233cca940" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1174.050621] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395160, 'name': CreateVM_Task, 'duration_secs': 0.289087} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.050776] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1174.051424] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1174.051593] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.051907] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1174.052167] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66fb260a-8e1d-4e0c-bc8c-f53abf6de3b9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.056847] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 1174.056847] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52b4803e-b09e-e623-d3fd-9fd1bc13f412" [ 1174.056847] env[69027]: _type = "Task" [ 1174.056847] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.065212] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52b4803e-b09e-e623-d3fd-9fd1bc13f412, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.569107] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1174.569358] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1174.569596] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1186.767502] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1191.728010] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "a907f1ab-3540-4bc0-8389-005233cca940" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.809572] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.771612] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1194.782143] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.782365] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1194.782535] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.782686] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1194.783844] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b47735-e69a-418b-b00e-4b038d2cbd14 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.792872] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9163a37a-cad7-43a9-af88-958056256c3f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.807716] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678f2ada-895a-4e22-b227-c8084d6e53e3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.813873] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709a933e-e1a4-4694-98f2-cfa1c32b09e2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.842693] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180862MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1194.842811] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.843016] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.914600] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.914779] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.914909] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.915046] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.915172] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.915295] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.915435] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.915556] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.915672] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.915786] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.926917] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.936683] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 5401d655-86da-41b0-9d29-3ba25d21f1ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.945883] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 16b7e547-8dc8-4305-8ff6-64736fc9cbb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.956080] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f4d49026-8558-44c7-b475-215eecba4e09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.965536] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fb7d7808-9768-4882-9405-0d07c41509fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.974502] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c39e4257-7526-461e-ad95-91defe4d51ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.984500] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e6224db8-1a05-4832-95be-7231fda105f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1194.993856] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03e2d14d-9195-4ee5-b2e0-05b803dcfefc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1195.003110] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 45296842-f415-42eb-b67e-096465650c09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1195.012132] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 34a1076e-6a17-442d-8a71-1d49117edad5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1195.021300] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1195.030350] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 54340994-037e-4255-b32b-18d8784733c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1195.039171] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1195.040021] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1195.040021] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1195.284518] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c346dd13-39ef-4607-ba19-d61bd20ba328 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.291985] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596650c5-69c8-421d-9b1f-ac1692b93241 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.322023] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82918681-d564-4a9d-8ead-45da5c1f7cef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.329101] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47eb4314-9115-4015-afd9-970e4b57be6c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.342410] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.351292] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1195.365878] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1195.366082] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.523s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1197.366647] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1197.366647] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1197.367043] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1197.385972] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.386406] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.386406] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.386613] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.386822] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.386963] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.387228] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.387228] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.387321] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.387442] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1197.387561] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1197.388069] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1197.771090] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.771393] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.771697] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1200.772061] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1201.773186] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.771622] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.094023] env[69027]: WARNING oslo_vmware.rw_handles [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1218.094023] env[69027]: ERROR oslo_vmware.rw_handles [ 1218.094903] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1218.096420] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1218.096670] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Copying Virtual Disk [datastore2] vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] 
vmware_temp/1c8e6ea9-d368-47d2-9389-f5360f9ab800/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1218.096973] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2337f29c-14b3-46b3-b24c-ea46bd0e8fcc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.106402] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 1218.106402] env[69027]: value = "task-3395161" [ 1218.106402] env[69027]: _type = "Task" [ 1218.106402] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.114469] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.616610] env[69027]: DEBUG oslo_vmware.exceptions [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1218.616903] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1218.617483] env[69027]: ERROR nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1218.617483] env[69027]: Faults: ['InvalidArgument'] [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Traceback (most recent call last): [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] yield resources [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self.driver.spawn(context, instance, image_meta, [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1218.617483] env[69027]: ERROR 
nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self._fetch_image_if_missing(context, vi) [ 1218.617483] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] image_cache(vi, tmp_image_ds_loc) [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] vm_util.copy_virtual_disk( [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] session._wait_for_task(vmdk_copy_task) [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] return self.wait_for_task(task_ref) [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] return evt.wait() [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] result = hub.switch() [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1218.617943] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] return self.greenlet.switch() [ 1218.618471] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1218.618471] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self.f(*self.args, **self.kw) [ 1218.618471] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1218.618471] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] raise exceptions.translate_fault(task_info.error) [ 1218.618471] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1218.618471] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Faults: ['InvalidArgument'] [ 1218.618471] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] [ 1218.618471] env[69027]: INFO nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Terminating instance [ 1218.619430] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.619649] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1218.619886] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0576ad66-b590-4a4d-92d9-2ffc922220e7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.622100] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1218.622296] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1218.623127] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d11db5-23d7-463a-83b6-1f64b28c43df {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.630075] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1218.630289] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dbfd45e0-d5d9-4b75-a6ef-14a688dadf53 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.632514] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1218.632708] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1218.633690] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-572c6775-1e9e-4bd3-be31-094896ef69be {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.638526] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 1218.638526] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52dc43c5-e07d-a9df-9739-1e762253941c" [ 1218.638526] env[69027]: _type = "Task" [ 1218.638526] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.645707] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52dc43c5-e07d-a9df-9739-1e762253941c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.694665] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1218.694926] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1218.695130] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleting the datastore file [datastore2] d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1218.695396] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b79e48a-99a6-4ce4-8e15-5f743a4169a7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.702317] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for the task: (returnval){ [ 1218.702317] env[69027]: value = "task-3395163" [ 1218.702317] env[69027]: _type = "Task" [ 1218.702317] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.710524] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395163, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.149622] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1219.149933] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating directory with path [datastore2] vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.150096] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b2d4d2d-658a-4acb-ab80-7b4c663cdfab {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.162615] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Created directory with path [datastore2] vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1219.162615] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Fetch image to [datastore2] vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1219.162615] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1219.163344] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe007e6-aa12-4a04-b1a6-7db29f460ed5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.169699] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d10a9d-46f2-45f0-98c6-1bc3f073ae72 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.179795] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d532afa5-bf15-45a5-bb55-b7621bba4480 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.213136] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a997b029-c44b-4ed6-903c-69edc0d8f1bc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.220134] env[69027]: DEBUG oslo_vmware.api [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Task: {'id': task-3395163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080749} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.221698] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1219.221896] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1219.222125] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1219.222620] env[69027]: INFO nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Took 0.60 seconds to destroy the instance on the hypervisor. 
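The records above follow two vCenter tasks to completion: the image-cache copy (task-3395161, CopyVirtualDisk_Task), which fails with InvalidArgument on fileType, and the cleanup (task-3395163, DeleteDatastoreFile_Task), which completes after about 0.08s, each polled through oslo_vmware.api's wait_for_task/_poll_task until a terminal state. Purely as an aid for reading a wrapped capture like this one, the Python sketch below (standard library only; the log path, the record-start regex, and the helper names are assumptions, not part of the captured output) pulls every record that mentions a given task id so the request, poll, and completion entries can be read in isolation.

    import re

    # Assumed path to a capture like the log above (not taken from the log itself).
    LOG_PATH = "nova-compute.log"

    # Every record in this capture starts with "[ <seconds>] env[<pid>]:", but one
    # record may be wrapped across several physical lines, so split on that marker
    # instead of on newlines.
    RECORD_START = re.compile(r"\[ *\d+\.\d+\] env\[\d+\]:")

    def iter_records(path):
        text = open(path, encoding="utf-8", errors="replace").read().replace("\n", " ")
        starts = [m.start() for m in RECORD_START.finditer(text)]
        for begin, end in zip(starts, starts[1:] + [len(text)]):
            yield text[begin:end].strip()

    def records_for(path, needle):
        # e.g. needle = "task-3395163" to isolate the DeleteDatastoreFile_Task above.
        return [rec for rec in iter_records(path) if needle in rec]

    if __name__ == "__main__":
        for rec in records_for(LOG_PATH, "task-3395163"):
            print(rec[:160])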
[ 1219.224137] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f310e490-9a2b-4d00-bdc8-7bc38a5b2894 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.226044] env[69027]: DEBUG nova.compute.claims [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1219.226225] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1219.226438] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1219.248261] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1219.358574] env[69027]: DEBUG oslo_vmware.rw_handles [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1219.421039] env[69027]: DEBUG oslo_vmware.rw_handles [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1219.421334] env[69027]: DEBUG oslo_vmware.rw_handles [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1219.561349] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c34ecfb-0bd4-46ee-a617-5705b7bc0a78 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.568666] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3539e7-e6db-45ae-9db5-3c8745d01201 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.598229] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29718aa7-9279-4cad-9073-958da4d59647 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.605502] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb79a547-b313-466d-88bf-5b51f6202a46 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.618336] env[69027]: DEBUG nova.compute.provider_tree [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.626635] env[69027]: DEBUG nova.scheduler.client.report [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1219.641925] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.414s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1219.641925] env[69027]: ERROR nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1219.641925] env[69027]: Faults: ['InvalidArgument'] [ 1219.641925] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Traceback (most recent call last): [ 1219.641925] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1219.641925] 
env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self.driver.spawn(context, instance, image_meta, [ 1219.641925] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1219.641925] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1219.641925] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1219.641925] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self._fetch_image_if_missing(context, vi) [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] image_cache(vi, tmp_image_ds_loc) [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] vm_util.copy_virtual_disk( [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] session._wait_for_task(vmdk_copy_task) [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] return self.wait_for_task(task_ref) [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] return evt.wait() [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] result = hub.switch() [ 1219.642322] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] return self.greenlet.switch() [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] self.f(*self.args, **self.kw) [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] raise exceptions.translate_fault(task_info.error) [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Faults: ['InvalidArgument'] [ 1219.642809] env[69027]: ERROR nova.compute.manager [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] [ 1219.642809] env[69027]: DEBUG nova.compute.utils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1219.644600] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Build of instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 was re-scheduled: A specified parameter was not correct: fileType [ 1219.644600] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1219.644888] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1219.645082] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1219.645261] env[69027]: DEBUG nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1219.645442] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1220.074890] env[69027]: DEBUG nova.network.neutron [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.092203] env[69027]: INFO nova.compute.manager [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Took 0.45 seconds to deallocate network for instance. [ 1220.189972] env[69027]: INFO nova.scheduler.client.report [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Deleted allocations for instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 [ 1220.213213] env[69027]: DEBUG oslo_concurrency.lockutils [None req-095b051e-a49b-4456-bd23-7da38694e810 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 619.774s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.215111] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 421.917s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.215384] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Acquiring lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.215879] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.216130] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.219686] env[69027]: INFO nova.compute.manager [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Terminating instance [ 1220.221019] env[69027]: DEBUG nova.compute.manager [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1220.221319] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1220.222207] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87464b00-39d3-4fd4-88ea-c35470bf37f6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.231240] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b972ba6-33cb-4b89-bd1f-3bcd945efe13 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.242938] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1220.263916] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 could not be found. 
[ 1220.264141] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1220.264381] env[69027]: INFO nova.compute.manager [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1220.264586] env[69027]: DEBUG oslo.service.loopingcall [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1220.264808] env[69027]: DEBUG nova.compute.manager [-] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1220.264906] env[69027]: DEBUG nova.network.neutron [-] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1220.289719] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1220.289933] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.291434] env[69027]: INFO nova.compute.claims [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1220.294509] env[69027]: DEBUG nova.network.neutron [-] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.304038] env[69027]: INFO nova.compute.manager [-] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] Took 0.04 seconds to deallocate network for instance. 
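The lockutils records in this stretch report, for each lock, how long the caller waited and how long the lock was held; the build lock on instance d8db9bfa-336f-48b6-b0b5-3aa3e97f5698 above was held for 619.774s across the failed build and reschedule, and the terminate path waited 421.917s to acquire it. As a rough triage aid only, the sketch below (standard library only; the threshold value and the log filename are assumptions) extracts the released/held records in the format shown above so unusually long holds like that one stand out.

    import re

    # Matches the lockutils release records seen above, e.g.
    #   Lock "compute_resources" "released" by "...abort_instance_claim" :: held 0.414s
    HELD_RE = re.compile(r'Lock "([^"]+)" "released" by "([^"]+)" :: held (\d+\.\d+)s')

    def long_lock_holds(text, threshold_s=10.0):
        """Return (seconds, lock_name, holder) for every hold at or above threshold_s."""
        hits = [(float(secs), name, holder) for name, holder, secs in HELD_RE.findall(text)]
        return sorted((h for h in hits if h[0] >= threshold_s), reverse=True)

    # Usage sketch; "nova-compute.log" is an assumed filename for a capture like this one.
    # with open("nova-compute.log", encoding="utf-8", errors="replace") as handle:
    #     for secs, lock_name, holder in long_lock_holds(handle.read()):
    #         print(f"{secs:10.3f}s  {lock_name}  held by {holder}")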
[ 1220.404155] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1f4b9995-a05c-4528-ad21-c4a07176cba8 tempest-ListServerFiltersTestJSON-574454295 tempest-ListServerFiltersTestJSON-574454295-project-member] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.189s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.405718] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 78.331s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1220.405718] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d8db9bfa-336f-48b6-b0b5-3aa3e97f5698] During sync_power_state the instance has a pending task (deleting). Skip. [ 1220.405718] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "d8db9bfa-336f-48b6-b0b5-3aa3e97f5698" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.590023] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fca2205-d344-405b-864e-faa546a8d817 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.596714] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedcbfb4-70fe-453f-841d-710f01c798a1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.625061] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2d1fb2-295b-41d9-a3d8-54a22fc92c60 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.631422] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b33f544-202c-4c04-8ac0-1931c7c91262 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.643633] env[69027]: DEBUG nova.compute.provider_tree [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1220.652266] env[69027]: DEBUG nova.scheduler.client.report [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1220.665376] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.375s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1220.665945] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1220.698770] env[69027]: DEBUG nova.compute.utils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1220.700295] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1220.700483] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1220.709066] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1220.765009] env[69027]: DEBUG nova.policy [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ab5ab147c6d4be89e95d1d92900ae8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '95ea6520fd384627b337a00e71b14cd1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1220.776634] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1220.801574] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1220.801811] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1220.801968] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1220.802171] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1220.802319] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1220.802496] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1220.802676] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1220.802831] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1220.802994] env[69027]: DEBUG nova.virt.hardware [None 
req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1220.803169] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1220.803341] env[69027]: DEBUG nova.virt.hardware [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1220.804216] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815999cb-e0e8-46f9-bd3e-acf40cd1e469 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.813877] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd07f6a-a378-4861-85f2-202fb5811a9f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.194643] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Successfully created port: 08801eed-cd2b-4c93-975f-8c587b8b6992 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1221.867759] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Successfully updated port: 08801eed-cd2b-4c93-975f-8c587b8b6992 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1221.884586] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "refresh_cache-1715faa2-86ea-49f9-a993-1003aea54384" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1221.884781] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquired lock "refresh_cache-1715faa2-86ea-49f9-a993-1003aea54384" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1221.884938] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1221.954447] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 
tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1222.172358] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Updating instance_info_cache with network_info: [{"id": "08801eed-cd2b-4c93-975f-8c587b8b6992", "address": "fa:16:3e:49:5e:c2", "network": {"id": "e198af70-a0d5-4413-b845-509f9f0a7783", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1509779051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95ea6520fd384627b337a00e71b14cd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08801eed-cd", "ovs_interfaceid": "08801eed-cd2b-4c93-975f-8c587b8b6992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.184292] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Releasing lock "refresh_cache-1715faa2-86ea-49f9-a993-1003aea54384" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.184585] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Instance network_info: |[{"id": "08801eed-cd2b-4c93-975f-8c587b8b6992", "address": "fa:16:3e:49:5e:c2", "network": {"id": "e198af70-a0d5-4413-b845-509f9f0a7783", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1509779051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95ea6520fd384627b337a00e71b14cd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08801eed-cd", "ovs_interfaceid": "08801eed-cd2b-4c93-975f-8c587b8b6992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1222.185022] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:5e:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08801eed-cd2b-4c93-975f-8c587b8b6992', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1222.194020] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Creating folder: Project (95ea6520fd384627b337a00e71b14cd1). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1222.194020] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41d4071b-3cf8-45bb-aad1-d26b261d17b7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.204893] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Created folder: Project (95ea6520fd384627b337a00e71b14cd1) in parent group-v677321. [ 1222.205178] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Creating folder: Instances. Parent ref: group-v677384. 
{{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1222.205424] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-374136a6-6b55-4b7f-b5f6-129c9ca205de {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.209357] env[69027]: DEBUG nova.compute.manager [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Received event network-vif-plugged-08801eed-cd2b-4c93-975f-8c587b8b6992 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1222.209573] env[69027]: DEBUG oslo_concurrency.lockutils [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] Acquiring lock "1715faa2-86ea-49f9-a993-1003aea54384-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.209779] env[69027]: DEBUG oslo_concurrency.lockutils [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] Lock "1715faa2-86ea-49f9-a993-1003aea54384-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1222.209950] env[69027]: DEBUG oslo_concurrency.lockutils [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] Lock "1715faa2-86ea-49f9-a993-1003aea54384-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1222.210157] env[69027]: DEBUG nova.compute.manager [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] No waiting events found dispatching network-vif-plugged-08801eed-cd2b-4c93-975f-8c587b8b6992 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1222.210367] env[69027]: WARNING nova.compute.manager [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Received unexpected event network-vif-plugged-08801eed-cd2b-4c93-975f-8c587b8b6992 for instance with vm_state building and task_state spawning. [ 1222.210489] env[69027]: DEBUG nova.compute.manager [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Received event network-changed-08801eed-cd2b-4c93-975f-8c587b8b6992 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1222.210645] env[69027]: DEBUG nova.compute.manager [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Refreshing instance network info cache due to event network-changed-08801eed-cd2b-4c93-975f-8c587b8b6992. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1222.210828] env[69027]: DEBUG oslo_concurrency.lockutils [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] Acquiring lock "refresh_cache-1715faa2-86ea-49f9-a993-1003aea54384" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.210963] env[69027]: DEBUG oslo_concurrency.lockutils [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] Acquired lock "refresh_cache-1715faa2-86ea-49f9-a993-1003aea54384" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.211132] env[69027]: DEBUG nova.network.neutron [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Refreshing network info cache for port 08801eed-cd2b-4c93-975f-8c587b8b6992 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1222.221798] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Created folder: Instances in parent group-v677384. [ 1222.222042] env[69027]: DEBUG oslo.service.loopingcall [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1222.223286] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1222.223286] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6cb212a8-ca6e-4189-822c-f542bc7b133b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.242341] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1222.242341] env[69027]: value = "task-3395166" [ 1222.242341] env[69027]: _type = "Task" [ 1222.242341] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.253122] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395166, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.754631] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395166, 'name': CreateVM_Task, 'duration_secs': 0.290071} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.754956] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1222.762506] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1222.762630] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.763007] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1222.763316] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37846ec2-8b87-4563-9869-4e5a128ed4db {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.765585] env[69027]: DEBUG nova.network.neutron [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Updated VIF entry in instance network info cache for port 08801eed-cd2b-4c93-975f-8c587b8b6992. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1222.766072] env[69027]: DEBUG nova.network.neutron [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Updating instance_info_cache with network_info: [{"id": "08801eed-cd2b-4c93-975f-8c587b8b6992", "address": "fa:16:3e:49:5e:c2", "network": {"id": "e198af70-a0d5-4413-b845-509f9f0a7783", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1509779051-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "95ea6520fd384627b337a00e71b14cd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08801eed-cd", "ovs_interfaceid": "08801eed-cd2b-4c93-975f-8c587b8b6992", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.770107] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Waiting for the task: (returnval){ [ 1222.770107] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]523207ac-f455-f856-cb2d-f065d65e4ad0" [ 1222.770107] env[69027]: _type = "Task" [ 1222.770107] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.776839] env[69027]: DEBUG oslo_concurrency.lockutils [req-de584db5-dcbc-484f-ab35-8ad8c19150ea req-f5037460-f3a4-41e2-9b0d-8a4850ad63b9 service nova] Releasing lock "refresh_cache-1715faa2-86ea-49f9-a993-1003aea54384" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1222.780212] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]523207ac-f455-f856-cb2d-f065d65e4ad0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.280328] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1223.280577] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1223.280792] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.939181] env[69027]: DEBUG oslo_concurrency.lockutils [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "1715faa2-86ea-49f9-a993-1003aea54384" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.752910] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1228.753377] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.207736] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.208054] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1236.450900] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "03d9d361-da15-4fb7-acfb-049098183bc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1236.451531] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "03d9d361-da15-4fb7-acfb-049098183bc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.119581] env[69027]: DEBUG oslo_concurrency.lockutils [None req-418a2ef6-73d0-494d-8722-304c1351ecf2 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "3054e7f1-4a02-47ca-91fd-4d8669004e8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.119955] env[69027]: DEBUG oslo_concurrency.lockutils [None req-418a2ef6-73d0-494d-8722-304c1351ecf2 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "3054e7f1-4a02-47ca-91fd-4d8669004e8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.767166] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.771323] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1255.781689] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.781916] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.782105] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1255.782267] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1255.783413] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce41f1d-43ec-4ba8-97e9-458e70c1b936 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.796180] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfaedfcd-1b83-405c-a13e-b8e7940417e3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.811227] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df2b219-e918-4e96-8c2a-9e1870c885da {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.818408] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fc2ff5-2a2e-4e92-bcca-e96edcf0b311 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.854568] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180935MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1255.854803] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.855042] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.947984] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.948244] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.948303] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.948425] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.948543] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.948659] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.948773] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.948887] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.949009] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.949135] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1255.965022] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 45296842-f415-42eb-b67e-096465650c09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.973951] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 34a1076e-6a17-442d-8a71-1d49117edad5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.985165] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1255.999176] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 54340994-037e-4255-b32b-18d8784733c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.008899] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.022869] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.035291] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.086501] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.103820] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3054e7f1-4a02-47ca-91fd-4d8669004e8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1256.104188] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1256.104296] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1256.409155] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71f7361-6fb4-4eb5-999a-ef1331942b81 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.418348] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62057a4b-c26c-4c5a-8638-6c98d9e26b31 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.453713] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c0b955-f9df-4a89-8e44-aa377df7a878 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.461239] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f78a9c-7ca6-4cc3-9d33-adfeee431245 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.474306] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1256.484852] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1256.502124] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1256.502332] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.647s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.502809] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.772055] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.772055] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1257.772055] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1257.793527] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.793690] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.793823] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.793983] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.794132] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.794257] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.794378] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.794500] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.794618] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.794733] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1257.794852] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1258.771457] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.660909] env[69027]: DEBUG oslo_concurrency.lockutils [None req-99ab5b10-dbaf-4be1-8b5f-e21f9f027a30 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "c6a011b8-3c47-4e37-a9f1-e36a546048ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.660909] env[69027]: DEBUG oslo_concurrency.lockutils [None req-99ab5b10-dbaf-4be1-8b5f-e21f9f027a30 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "c6a011b8-3c47-4e37-a9f1-e36a546048ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.771695] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.771695] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1260.771695] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1262.767763] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.771711] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.772073] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.380518] env[69027]: DEBUG oslo_concurrency.lockutils [None req-906ac4c6-1d8b-467c-bc05-d7dd024b8866 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Acquiring lock "bfe81ca7-70dc-4e48-9f8b-afa901baec0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.380901] env[69027]: DEBUG oslo_concurrency.lockutils [None req-906ac4c6-1d8b-467c-bc05-d7dd024b8866 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Lock "bfe81ca7-70dc-4e48-9f8b-afa901baec0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.083785] env[69027]: WARNING oslo_vmware.rw_handles [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1269.083785] env[69027]: ERROR oslo_vmware.rw_handles [ 1269.084385] env[69027]: DEBUG nova.virt.vmwareapi.images [None 
req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1269.086376] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1269.086647] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Copying Virtual Disk [datastore2] vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/1664402e-d4ce-405f-adf8-bd3d02ccf91f/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1269.086967] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c318450a-5b53-4c95-91ed-e627dcbd2625 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.097813] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 1269.097813] env[69027]: value = "task-3395167" [ 1269.097813] env[69027]: _type = "Task" [ 1269.097813] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.106321] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': task-3395167, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.607949] env[69027]: DEBUG oslo_vmware.exceptions [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1269.608264] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1269.609027] env[69027]: ERROR nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1269.609027] env[69027]: Faults: ['InvalidArgument'] [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Traceback (most recent call last): [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] yield resources [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self.driver.spawn(context, instance, image_meta, [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self._fetch_image_if_missing(context, vi) [ 1269.609027] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] image_cache(vi, tmp_image_ds_loc) [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] vm_util.copy_virtual_disk( [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] session._wait_for_task(vmdk_copy_task) [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] return self.wait_for_task(task_ref) [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] return evt.wait() [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] result = hub.switch() [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1269.609371] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] return self.greenlet.switch() [ 1269.609643] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1269.609643] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self.f(*self.args, **self.kw) [ 1269.609643] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1269.609643] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] raise exceptions.translate_fault(task_info.error) [ 1269.609643] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1269.609643] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Faults: ['InvalidArgument'] [ 1269.609643] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] [ 1269.609643] env[69027]: INFO nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Terminating instance [ 1269.611574] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1269.611774] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1269.612114] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1269.612322] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1269.613059] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be004c7-57ce-4a7a-8bb3-81696c1ceb03 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.615695] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07ebc8e5-cff1-4c03-9362-19f3a7f065df {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.621327] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1269.621543] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69f6dc79-11ea-4dc4-8599-5331e7b91152 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.623719] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1269.623893] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1269.624861] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ce17cd3-ec86-4f45-a1bd-bf679a6786e2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.630029] env[69027]: DEBUG oslo_vmware.api [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Waiting for the task: (returnval){ [ 1269.630029] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52874368-0414-36ad-9759-11da1debb0aa" [ 1269.630029] env[69027]: _type = "Task" [ 1269.630029] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.636346] env[69027]: DEBUG oslo_vmware.api [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52874368-0414-36ad-9759-11da1debb0aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.688030] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1269.688279] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1269.688463] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Deleting the datastore file [datastore2] 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1269.688732] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-007eb71f-6194-49d8-850e-c448dbc9dacb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.695039] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 1269.695039] env[69027]: value = "task-3395169" [ 1269.695039] env[69027]: _type = "Task" [ 1269.695039] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.702501] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': task-3395169, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.139751] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1270.140038] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Creating directory with path [datastore2] vmware_temp/8dc2d69f-ac62-4c71-b70f-999f4310eb81/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1270.140249] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c719c1d3-054d-4a1f-8fce-27fd0ff279bd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.151755] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Created directory with path [datastore2] vmware_temp/8dc2d69f-ac62-4c71-b70f-999f4310eb81/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1270.151949] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Fetch image to [datastore2] vmware_temp/8dc2d69f-ac62-4c71-b70f-999f4310eb81/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1270.152140] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/8dc2d69f-ac62-4c71-b70f-999f4310eb81/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1270.152880] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8f8b3d-0036-468b-a1c3-b31c22a6bb87 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.159108] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96eca4b8-132a-4170-b0e4-b27b82ddd04a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.167769] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4bde05-2e25-4295-a4ab-4607f04879fc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.200931] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dca4279-35a7-41f8-8689-b99479811d02 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.207572] env[69027]: DEBUG oslo_vmware.api [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': task-3395169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078672} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.208927] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1270.209138] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1270.209314] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1270.209491] env[69027]: INFO nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Took 0.60 seconds to destroy the instance on the hypervisor. 
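The entries above trace the oslo.vmware task pattern end to end: a SOAP task (here DeleteDatastoreFile_Task) is started, polled via session.wait_for_task() (the "progress is 0%" lines), and a vCenter fault such as the InvalidArgument/fileType error is re-raised as VimFaultException, which is what aborted the spawn of instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73. The following is a minimal sketch of that pattern, not the Nova code itself; the host, credentials, datastore path and the delete_datastore_file helper are illustrative placeholders.

# Minimal sketch (not the Nova implementation) of the oslo.vmware pattern in
# the entries above: start a SOAP task, poll it with wait_for_task(), and let
# vCenter faults surface as VimFaultException (fault_list=['InvalidArgument']
# in the fileType failure logged for the spawn).
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc


def delete_datastore_file(session, file_path, datacenter_ref):
    """Start DeleteDatastoreFile_Task and block until it completes."""
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name=file_path, datacenter=datacenter_ref)
    try:
        # wait_for_task() polls the task in a loop and raises a translated
        # fault if the task ends in an error state.
        session.wait_for_task(task)
    except vmware_exc.VimFaultException as exc:
        # exc.fault_list carries the vCenter fault names, e.g. ['InvalidArgument'].
        print('task failed with faults: %s' % exc.fault_list)
        raise


if __name__ == '__main__':
    # Placeholder connection values; a reachable vCenter endpoint is required,
    # and datacenter_ref must be a real Datacenter managed object reference.
    session = vmware_api.VMwareAPISession(
        host='vc.example.test',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=1.0)
    delete_datastore_file(session,
                          '[datastore2] example-dir/example.vmdk',
                          datacenter_ref=None)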
[ 1270.211896] env[69027]: DEBUG nova.compute.claims [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1270.212118] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.212350] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.215623] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ff0ff61b-6982-438d-bdfe-bef3d0acadf2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.236023] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1270.419106] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.420734] env[69027]: ERROR nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
[ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = getattr(controller, method)(*args, **kwargs) [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._get(image_id) [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1270.420734] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] resp, body = self.http_client.get(url, headers=header) [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.request(url, 'GET', **kwargs) [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._handle_response(resp) [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exc.from_response(resp, resp.content) [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During handling of the above exception, another exception occurred: [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1270.421131] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] yield resources [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self.driver.spawn(context, instance, image_meta, [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._fetch_image_if_missing(context, vi) [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image_fetch(context, vi, tmp_image_ds_loc) [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] images.fetch_image( [ 1270.421457] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] metadata = IMAGE_API.get(context, image_ref) [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return session.show(context, image_id, [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] _reraise_translated_image_exception(image_id) [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise new_exc.with_traceback(exc_trace) [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = getattr(controller, method)(*args, **kwargs) [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1270.421769] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._get(image_id) [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] resp, body = self.http_client.get(url, headers=header) [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.request(url, 'GET', **kwargs) [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._handle_response(resp) [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exc.from_response(resp, resp.content) [ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
[ 1270.422169] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1270.422463] env[69027]: INFO nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Terminating instance [ 1270.422659] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.422879] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1270.423528] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1270.423730] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1270.426389] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e5ddee3-16da-4b4b-a4be-a241f2fe9345 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.428742] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cd5955-446b-4c38-a0d8-4ea92b78fd26 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.435846] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1270.436078] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bef8734d-e3ed-40f0-ac8e-c8d5d6141857 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.438207] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1270.438400] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 
tempest-ListImageFiltersTestJSON-1244799674-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1270.439329] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a3236f3-f2ab-40e0-bd4a-78daf451a286 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.445727] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 1270.445727] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52338ab8-6722-dce3-0c5a-32ae03d4058c" [ 1270.445727] env[69027]: _type = "Task" [ 1270.445727] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.452758] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52338ab8-6722-dce3-0c5a-32ae03d4058c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.483105] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af165386-9d89-4f7b-8312-6ff4647e30dd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.489657] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2946a8e-5c22-4847-be5d-232e371cdda7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.520557] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5bfd5f-03a8-45e7-8076-238908a69bc0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.523131] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1270.523335] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1270.523514] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Deleting the datastore file [datastore2] 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1270.523742] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85bd463f-4641-47e3-997d-9e4d8580fa04 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.531903] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a15278-228c-4516-8961-9430ebb64317 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.536072] env[69027]: DEBUG oslo_vmware.api [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Waiting for the task: (returnval){ [ 1270.536072] env[69027]: value = "task-3395171" [ 1270.536072] env[69027]: _type = "Task" [ 1270.536072] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.546308] env[69027]: DEBUG nova.compute.provider_tree [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1270.551439] env[69027]: DEBUG oslo_vmware.api [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Task: {'id': task-3395171, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.554863] env[69027]: DEBUG nova.scheduler.client.report [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1270.568583] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.356s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1270.569095] env[69027]: ERROR nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1270.569095] env[69027]: Faults: ['InvalidArgument'] [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Traceback (most recent call last): [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self.driver.spawn(context, instance, image_meta, [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self._fetch_image_if_missing(context, vi) [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] image_cache(vi, tmp_image_ds_loc) [ 1270.569095] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] vm_util.copy_virtual_disk( [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] session._wait_for_task(vmdk_copy_task) [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] return self.wait_for_task(task_ref) [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] return evt.wait() [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] result = hub.switch() [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] return self.greenlet.switch() [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1270.569425] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] self.f(*self.args, **self.kw) [ 1270.569746] env[69027]: ERROR nova.compute.manager [instance: 
362a7b3c-f0b2-46e6-a9fa-2c284a059d73] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1270.569746] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] raise exceptions.translate_fault(task_info.error) [ 1270.569746] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1270.569746] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Faults: ['InvalidArgument'] [ 1270.569746] env[69027]: ERROR nova.compute.manager [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] [ 1270.569878] env[69027]: DEBUG nova.compute.utils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1270.571158] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Build of instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 was re-scheduled: A specified parameter was not correct: fileType [ 1270.571158] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1270.571533] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1270.571865] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1270.572099] env[69027]: DEBUG nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1270.572288] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1270.872921] env[69027]: DEBUG nova.network.neutron [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1270.888414] env[69027]: INFO nova.compute.manager [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Took 0.31 seconds to deallocate network for instance. [ 1270.958458] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1270.958458] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating directory with path [datastore2] vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1270.958458] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d21541d8-3851-46c4-96c2-240106eb316b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.971789] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Created directory with path [datastore2] vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1270.971789] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Fetch image to [datastore2] vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1270.972208] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1270.973713] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868b67d5-1180-4143-8259-18336fca63c5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.984765] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f1f5cb-8eb5-48ac-adbe-500ecf760b4d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.995652] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113cc2c1-4fc0-492e-9d76-c2f7268db061 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.031442] env[69027]: INFO nova.scheduler.client.report [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Deleted allocations for instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 [ 1271.038362] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c30dbb-e13c-43e7-a9c5-9fdeff5aa952 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.054776] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-84f92c86-261e-4a98-af44-d051f5bd2beb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.056196] env[69027]: DEBUG oslo_vmware.api [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Task: {'id': task-3395171, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06434} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.056196] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1271.056196] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1271.056196] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1271.056389] env[69027]: INFO nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1271.061031] env[69027]: DEBUG nova.compute.claims [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1271.061031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.061031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.074082] env[69027]: DEBUG oslo_concurrency.lockutils [None req-aff7971f-a49c-428e-9e1a-b27f2808066a tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 670.541s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.075433] env[69027]: DEBUG oslo_concurrency.lockutils [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 472.031s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.075812] env[69027]: DEBUG oslo_concurrency.lockutils [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.075881] env[69027]: DEBUG oslo_concurrency.lockutils [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.076455] env[69027]: DEBUG oslo_concurrency.lockutils [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.079943] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1271.082279] env[69027]: INFO nova.compute.manager [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Terminating instance [ 1271.085229] env[69027]: DEBUG nova.compute.manager [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1271.085410] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1271.086574] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32ac8fc1-1627-4a41-aef0-725e56d5a629 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.088384] env[69027]: DEBUG nova.compute.manager [None req-76159d44-04cc-4630-8c6a-18dda9f4f1f6 tempest-InstanceActionsNegativeTestJSON-598876948 tempest-InstanceActionsNegativeTestJSON-598876948-project-member] [instance: 5401d655-86da-41b0-9d29-3ba25d21f1ad] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.097691] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b50541d-a1c7-47c0-a504-1fac259cd3c2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.130797] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 362a7b3c-f0b2-46e6-a9fa-2c284a059d73 could not be found. [ 1271.131015] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1271.131207] env[69027]: INFO nova.compute.manager [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1271.131460] env[69027]: DEBUG oslo.service.loopingcall [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1271.132324] env[69027]: DEBUG nova.compute.manager [-] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1271.132425] env[69027]: DEBUG nova.network.neutron [-] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1271.137372] env[69027]: DEBUG nova.compute.manager [None req-76159d44-04cc-4630-8c6a-18dda9f4f1f6 tempest-InstanceActionsNegativeTestJSON-598876948 tempest-InstanceActionsNegativeTestJSON-598876948-project-member] [instance: 5401d655-86da-41b0-9d29-3ba25d21f1ad] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.161782] env[69027]: DEBUG oslo_concurrency.lockutils [None req-76159d44-04cc-4630-8c6a-18dda9f4f1f6 tempest-InstanceActionsNegativeTestJSON-598876948 tempest-InstanceActionsNegativeTestJSON-598876948-project-member] Lock "5401d655-86da-41b0-9d29-3ba25d21f1ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.777s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.165270] env[69027]: DEBUG nova.network.neutron [-] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.169787] env[69027]: DEBUG oslo_vmware.rw_handles [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1271.172790] env[69027]: DEBUG nova.compute.manager [None req-dd737303-2d0a-47fa-ac07-a9e9419d00e1 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 16b7e547-8dc8-4305-8ff6-64736fc9cbb3] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.229054] env[69027]: INFO nova.compute.manager [-] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] Took 0.10 seconds to deallocate network for instance. [ 1271.230434] env[69027]: DEBUG nova.compute.manager [None req-dd737303-2d0a-47fa-ac07-a9e9419d00e1 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 16b7e547-8dc8-4305-8ff6-64736fc9cbb3] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.240131] env[69027]: DEBUG oslo_vmware.rw_handles [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1271.240324] env[69027]: DEBUG oslo_vmware.rw_handles [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1271.258259] env[69027]: DEBUG oslo_concurrency.lockutils [None req-dd737303-2d0a-47fa-ac07-a9e9419d00e1 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "16b7e547-8dc8-4305-8ff6-64736fc9cbb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.229s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.272084] env[69027]: DEBUG nova.compute.manager [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: f4d49026-8558-44c7-b475-215eecba4e09] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.299478] env[69027]: DEBUG nova.compute.manager [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: f4d49026-8558-44c7-b475-215eecba4e09] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.318359] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "f4d49026-8558-44c7-b475-215eecba4e09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.788s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.334811] env[69027]: DEBUG nova.compute.manager [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: fb7d7808-9768-4882-9405-0d07c41509fe] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.339472] env[69027]: DEBUG oslo_concurrency.lockutils [None req-20570e4a-5654-4559-a0af-71bb04f6bbff tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.264s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.340651] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 129.267s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.340907] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 362a7b3c-f0b2-46e6-a9fa-2c284a059d73] During sync_power_state the instance has a pending task (deleting). Skip. 
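The "Acquiring lock … acquired … waited / released … held" records above are emitted by oslo.concurrency's lockutils wrapper around per-instance critical sections. A minimal sketch of that pattern, purely for illustration (the UUID and function name below are copied from the trace, but this is not Nova's actual code):

    from oslo_concurrency import lockutils

    # Illustrative only: lockutils itself logs the DEBUG "acquired ... waited Ns"
    # and "released ... held Ns" messages seen in this trace around the call.
    @lockutils.synchronized('362a7b3c-f0b2-46e6-a9fa-2c284a059d73')
    def _locked_do_build_and_run_instance():
        # Work on the instance runs while the per-UUID lock is held; other
        # operations on the same UUID queue behind it, which is where the long
        # "waited 129.267s" / "held 232.777s" figures above come from.
        pass

    _locked_do_build_and_run_instance()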
[ 1271.341015] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "362a7b3c-f0b2-46e6-a9fa-2c284a059d73" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.359582] env[69027]: DEBUG nova.compute.manager [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] [instance: fb7d7808-9768-4882-9405-0d07c41509fe] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.380748] env[69027]: DEBUG oslo_concurrency.lockutils [None req-9cad0cf8-f8c6-426f-85d9-a82e8458cd3b tempest-MultipleCreateTestJSON-982064491 tempest-MultipleCreateTestJSON-982064491-project-member] Lock "fb7d7808-9768-4882-9405-0d07c41509fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.826s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.394097] env[69027]: DEBUG nova.compute.manager [None req-6b581503-68a2-4485-ac65-aaebe7770868 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: c39e4257-7526-461e-ad95-91defe4d51ac] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.418236] env[69027]: DEBUG nova.compute.manager [None req-6b581503-68a2-4485-ac65-aaebe7770868 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: c39e4257-7526-461e-ad95-91defe4d51ac] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.437314] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b581503-68a2-4485-ac65-aaebe7770868 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "c39e4257-7526-461e-ad95-91defe4d51ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.318s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.440011] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411eee9a-0866-425e-85a6-d5485fc73780 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.448245] env[69027]: DEBUG nova.compute.manager [None req-00808fa4-ac0f-4b32-b8b4-ffc6d98272bd tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] [instance: e6224db8-1a05-4832-95be-7231fda105f3] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.451327] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880101a2-38ae-43c9-859d-cbf6115e8241 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.482866] env[69027]: DEBUG nova.compute.manager [None req-00808fa4-ac0f-4b32-b8b4-ffc6d98272bd tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] [instance: e6224db8-1a05-4832-95be-7231fda105f3] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.484520] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3428abba-07b2-4c13-9cd5-f402b7cbc67d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.492681] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab1a99d-6b7f-4aa4-9e77-d0534acb15a3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.509051] env[69027]: DEBUG nova.compute.provider_tree [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.510880] env[69027]: DEBUG oslo_concurrency.lockutils [None req-00808fa4-ac0f-4b32-b8b4-ffc6d98272bd tempest-AttachVolumeNegativeTest-1328278852 tempest-AttachVolumeNegativeTest-1328278852-project-member] Lock "e6224db8-1a05-4832-95be-7231fda105f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.603s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.517537] env[69027]: DEBUG nova.scheduler.client.report [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1271.524028] env[69027]: DEBUG nova.compute.manager [None req-b9091b43-688c-44d6-bf02-1fc70c4f47f0 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] [instance: 03e2d14d-9195-4ee5-b2e0-05b803dcfefc] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.531089] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.470s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.531831] env[69027]: ERROR nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = getattr(controller, method)(*args, **kwargs) [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._get(image_id) [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1271.531831] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] resp, body = self.http_client.get(url, headers=header) [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.request(url, 'GET', **kwargs) [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._handle_response(resp) [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exc.from_response(resp, resp.content) [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During handling of the above exception, another exception occurred: [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.532136] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self.driver.spawn(context, instance, image_meta, [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._fetch_image_if_missing(context, vi) [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image_fetch(context, vi, tmp_image_ds_loc) [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] images.fetch_image( [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] metadata = IMAGE_API.get(context, image_ref) [ 1271.532410] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return session.show(context, image_id, [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 
9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] _reraise_translated_image_exception(image_id) [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise new_exc.with_traceback(exc_trace) [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = getattr(controller, method)(*args, **kwargs) [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._get(image_id) [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1271.532674] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] resp, body = self.http_client.get(url, headers=header) [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.request(url, 'GET', **kwargs) [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._handle_response(resp) [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exc.from_response(resp, resp.content) [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] nova.exception.ImageNotAuthorized: Not authorized for image 
1f242793-8cbc-47db-8e09-30ca2e488bdf. [ 1271.532965] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.533230] env[69027]: DEBUG nova.compute.utils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1271.533895] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Build of instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b was re-scheduled: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1271.534428] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1271.534607] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1271.534765] env[69027]: DEBUG nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1271.534930] env[69027]: DEBUG nova.network.neutron [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1271.554199] env[69027]: DEBUG nova.compute.manager [None req-b9091b43-688c-44d6-bf02-1fc70c4f47f0 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] [instance: 03e2d14d-9195-4ee5-b2e0-05b803dcfefc] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.574777] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b9091b43-688c-44d6-bf02-1fc70c4f47f0 tempest-ServersTestMultiNic-1655463347 tempest-ServersTestMultiNic-1655463347-project-member] Lock "03e2d14d-9195-4ee5-b2e0-05b803dcfefc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.787s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.585092] env[69027]: DEBUG nova.compute.manager [None req-50e2b5d3-22cf-4de7-95d3-24295f8a6a2b tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 45296842-f415-42eb-b67e-096465650c09] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.609974] env[69027]: DEBUG nova.compute.manager [None req-50e2b5d3-22cf-4de7-95d3-24295f8a6a2b tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 45296842-f415-42eb-b67e-096465650c09] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.630622] env[69027]: DEBUG oslo_concurrency.lockutils [None req-50e2b5d3-22cf-4de7-95d3-24295f8a6a2b tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "45296842-f415-42eb-b67e-096465650c09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.811s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.642067] env[69027]: DEBUG nova.compute.manager [None req-79a7f2a0-af60-4580-94b4-10d47aa34834 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] [instance: 34a1076e-6a17-442d-8a71-1d49117edad5] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.668043] env[69027]: DEBUG neutronclient.v2_0.client [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69027) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1271.669736] env[69027]: ERROR nova.compute.manager [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
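The Neutron 401 above comes from a plain list_ports call made while deallocating the instance's networks; once the request's token is no longer valid, python-neutronclient raises Unauthorized and Nova converts it to nova.exception.Unauthorized. A rough sketch of that call path, with assumed credentials and endpoint purely for illustration (not this deployment's real config):

    from keystoneauth1 import loading, session
    from neutronclient.v2_0 import client as neutron_client
    from neutronclient.common import exceptions as neutron_exc

    # Assumed auth options -- placeholders, not the deployment's real values.
    auth = loading.get_plugin_loader('password').load_from_options(
        auth_url='http://keystone.example:5000/v3',
        username='nova', password='secret', project_name='service',
        user_domain_id='default', project_domain_id='default')
    sess = session.Session(auth=auth)
    neutron = neutron_client.Client(session=sess)

    try:
        # Same kind of query the deallocate path issues for the instance's ports.
        ports = neutron.list_ports(device_id='9eba7edc-c55f-423d-b1c3-3b6f12d28f2b')
    except neutron_exc.Unauthorized:
        # A stale or invalid token yields the 401 body quoted in the log above.
        raise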
[ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = getattr(controller, method)(*args, **kwargs) [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._get(image_id) [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1271.669736] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] resp, body = self.http_client.get(url, headers=header) [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.request(url, 'GET', **kwargs) [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._handle_response(resp) [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exc.from_response(resp, resp.content) [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During handling of the above exception, another exception occurred: [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.670152] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self.driver.spawn(context, instance, image_meta, [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._fetch_image_if_missing(context, vi) [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image_fetch(context, vi, tmp_image_ds_loc) [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] images.fetch_image( [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] metadata = IMAGE_API.get(context, image_ref) [ 1271.670408] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return session.show(context, image_id, [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] _reraise_translated_image_exception(image_id) [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise new_exc.with_traceback(exc_trace) [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 
9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = getattr(controller, method)(*args, **kwargs) [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._get(image_id) [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1271.670670] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] resp, body = self.http_client.get(url, headers=header) [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.request(url, 'GET', **kwargs) [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self._handle_response(resp) [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exc.from_response(resp, resp.content) [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
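The chained traceback above reduces to a glanceclient image GET that returns HTTP 401, which Nova's glance layer re-raises as ImageNotAuthorized (see _reraise_translated_image_exception in the frames). A minimal sketch of that translation; the ImageNotAuthorized stand-in class and show_image helper are hypothetical, only the glanceclient calls mirror the trace:

    import glanceclient.exc
    from glanceclient import Client as GlanceClient

    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""

    def show_image(sess, image_id):
        # 'sess' is an authenticated keystoneauth1 Session (assumed).
        glance = GlanceClient('2', session=sess)
        try:
            return glance.images.get(image_id)
        except glanceclient.exc.HTTPUnauthorized:
            # Mirrors the translation visible in the traceback: the raw 401
            # from Glance becomes a Nova-level "Not authorized for image ...".
            raise ImageNotAuthorized('Not authorized for image %s.' % image_id)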
[ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During handling of the above exception, another exception occurred: [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.670920] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._build_and_run_instance(context, instance, image, [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exception.RescheduledException( [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] nova.exception.RescheduledException: Build of instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b was re-scheduled: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During handling of the above exception, another exception occurred: [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1271.671251] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] exception_handler_v20(status_code, error_body) [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise client_exc(message=error_message, [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Neutron server returns request_ids: ['req-33dab029-07ce-4554-a82e-d43f6c163f38'] [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 
9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During handling of the above exception, another exception occurred: [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._deallocate_network(context, instance, requested_networks) [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self.network_api.deallocate_for_instance( [ 1271.671515] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] data = neutron.list_ports(**search_opts) [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.list('ports', self.ports_path, retrieve_all, [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] for r in self._pagination(collection, path, **params): [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] res = self.get(path, params=params) [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1271.671803] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 
9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.retry_request("GET", action, body=body, [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.do_request(method, action, body=body, [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._handle_fault_response(status_code, replybody, resp) [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exception.Unauthorized() [ 1271.672081] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] nova.exception.Unauthorized: Not authorized. [ 1271.672333] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1271.672934] env[69027]: DEBUG nova.compute.manager [None req-79a7f2a0-af60-4580-94b4-10d47aa34834 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] [instance: 34a1076e-6a17-442d-8a71-1d49117edad5] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1271.703889] env[69027]: DEBUG oslo_concurrency.lockutils [None req-79a7f2a0-af60-4580-94b4-10d47aa34834 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Lock "34a1076e-6a17-442d-8a71-1d49117edad5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.295s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.717180] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.738436] env[69027]: INFO nova.scheduler.client.report [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Deleted allocations for instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b [ 1271.763510] env[69027]: DEBUG oslo_concurrency.lockutils [None req-55895c6c-ce3f-4681-a5c6-fe3db816f713 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.504s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.765420] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.611s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.765740] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.765995] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.766256] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1271.768355] env[69027]: INFO nova.compute.manager [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Terminating instance [ 1271.770351] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquiring lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1271.770387] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Acquired lock 
"refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.770543] env[69027]: DEBUG nova.network.neutron [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1271.774243] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.774968] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.776130] env[69027]: INFO nova.compute.claims [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1271.780167] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1271.840424] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.051595] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4569762a-6cc2-4e5b-b34b-e72e6e199ec7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.059197] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a283b42-d002-46c7-8a82-df82c4b75824 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.092898] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540b707b-48da-42a5-b771-8aaea6dd3053 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.100828] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0920cb0-b0ed-43a2-afc7-08c19259851b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.116561] env[69027]: DEBUG nova.compute.provider_tree [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.127339] env[69027]: DEBUG nova.scheduler.client.report [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1272.144477] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.370s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.144989] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1272.150148] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.310s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.151582] env[69027]: INFO nova.compute.claims [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1272.187632] env[69027]: DEBUG nova.compute.utils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1272.192109] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1272.192307] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1272.200985] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1272.271924] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1272.301091] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1272.301091] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1272.301091] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1272.301247] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1272.301247] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1272.301247] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1272.301247] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1272.301247] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1272.301363] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1272.302086] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1272.302526] env[69027]: DEBUG nova.virt.hardware [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1272.303825] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165bfe7b-e366-431f-b231-4a25980a1f7a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.310472] env[69027]: DEBUG nova.policy [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f570da9d13d14ffcb3c92c58adb89b18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94997c90fd1047dfb0959103e037feb5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1272.318394] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27994a2-d10f-47ef-9d85-4f5e803f8e7a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.463017] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1d3a7d-8055-4c92-9171-d2916786d407 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.469092] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcaab01-6775-46ac-a1e7-5f0cd3f3f33a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.502853] env[69027]: DEBUG nova.network.neutron [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Updating instance_info_cache with network_info: [{"id": "05101b58-3f03-4460-a462-941d5e97a04f", "address": "fa:16:3e:be:21:e0", "network": {"id": "6d41d3fc-9dd1-425c-87f3-5e84a4dbdb65", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": 
{"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "baa826edead146bab87cd5ad749bedb8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78fd2e0c-4fd2-4d81-8780-aa94237670c0", "external-id": "cl2-zone-164", "segmentation_id": 164, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05101b58-3f", "ovs_interfaceid": "05101b58-3f03-4460-a462-941d5e97a04f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.505105] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f28240-aece-4a98-b46e-488cf6e0380f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.513587] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceae32fb-a696-4298-8468-7dc367e76b16 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.518869] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Releasing lock "refresh_cache-9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1272.519548] env[69027]: DEBUG nova.compute.manager [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1272.519865] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1272.520822] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e31b6c4-86ed-4900-bcdc-8e07c6c68446 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.531176] env[69027]: DEBUG nova.compute.provider_tree [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1272.538805] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3c0578-9477-441e-b1f8-7edba55686f4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.551245] env[69027]: DEBUG nova.scheduler.client.report [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1272.572398] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b could not be found. [ 1272.572730] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1272.573027] env[69027]: INFO nova.compute.manager [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1272.573362] env[69027]: DEBUG oslo.service.loopingcall [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1272.574088] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.424s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.574545] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1272.577617] env[69027]: DEBUG nova.compute.manager [-] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1272.577617] env[69027]: DEBUG nova.network.neutron [-] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1272.607059] env[69027]: DEBUG nova.compute.utils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1272.608614] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1272.608780] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1272.617991] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1272.649518] env[69027]: INFO nova.virt.block_device [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Booting with volume 95843e00-538d-48b4-a36f-b79ea086f08c at /dev/sda [ 1272.701824] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74965b7e-7826-4fd3-94e2-e9f690fd2c8e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.707200] env[69027]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69027) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1272.707401] env[69027]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-0f3b71ea-913c-464b-bf33-7eef9d721ca4'] [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1272.708109] env[69027]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1272.708504] env[69027]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1272.708946] env[69027]: ERROR 
oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1272.708946] env[69027]: ERROR oslo.service.loopingcall [ 1272.709353] env[69027]: ERROR nova.compute.manager [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1272.714959] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1332a1d-f96a-4b16-a83e-f8573d3b26e1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.745067] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8982a0a9-1375-4d42-a339-6c23cafcef34 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.748424] env[69027]: ERROR nova.compute.manager [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] exception_handler_v20(status_code, error_body) [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise client_exc(message=error_message, [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Neutron server returns request_ids: ['req-0f3b71ea-913c-464b-bf33-7eef9d721ca4'] [ 1272.748424] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During handling of the above exception, another exception occurred: [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Traceback (most recent call last): [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._delete_instance(context, instance, bdms) [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._shutdown_instance(context, instance, bdms) [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._try_deallocate_network(context, instance, requested_networks) [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] with excutils.save_and_reraise_exception(): [ 1272.748731] env[69027]: ERROR 
nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.748731] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self.force_reraise() [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise self.value [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] _deallocate_network_with_retries() [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return evt.wait() [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = hub.switch() [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.greenlet.switch() [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1272.749068] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = func(*self.args, **self.kw) [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] result = f(*args, **kwargs) [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._deallocate_network( [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self.network_api.deallocate_for_instance( [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 
9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] data = neutron.list_ports(**search_opts) [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.list('ports', self.ports_path, retrieve_all, [ 1272.749381] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] for r in self._pagination(collection, path, **params): [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] res = self.get(path, params=params) [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.retry_request("GET", action, body=body, [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1272.749706] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] return self.do_request(method, action, body=body, [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] ret = obj(*args, **kwargs) [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] self._handle_fault_response(status_code, replybody, resp) [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1272.750029] env[69027]: ERROR nova.compute.manager [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] [ 1272.758679] env[69027]: DEBUG nova.policy [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '927596b7ee4543a49ebe1dac5e1285d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0445b99712b34789b0cb5a8e8b4b11d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1272.765612] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab11ded9-a622-4c79-8be2-0afbd99cf564 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.787123] env[69027]: DEBUG oslo_concurrency.lockutils [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.022s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.788522] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 130.714s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.788772] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1272.789111] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "9eba7edc-c55f-423d-b1c3-3b6f12d28f2b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1272.802514] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b297d69-96bf-43ca-be93-9e8239884274 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.809978] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Successfully created port: 0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1272.812321] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fcdacf-ae76-4ed5-ab86-95dbf3393a85 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.827799] env[69027]: DEBUG nova.virt.block_device [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Updating existing volume attachment record: 31303b94-7eb5-4b13-ab47-c3da898e4ab6 {{(pid=69027) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 1272.861924] env[69027]: INFO nova.compute.manager [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] [instance: 9eba7edc-c55f-423d-b1c3-3b6f12d28f2b] Successfully reverted task state from None on failure for instance. [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server [None req-414cb494-052d-4a14-8508-3d596ccd9428 tempest-TenantUsagesTestJSON-1207288372 tempest-TenantUsagesTestJSON-1207288372-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-0f3b71ea-913c-464b-bf33-7eef9d721ca4'] [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1272.864947] env[69027]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.865462] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1272.865857] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1272.866281] env[69027]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1272.866738] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.866738] env[69027]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1272.867203] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1272.867759] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1272.867759] env[69027]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1272.867759] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1272.867759] env[69027]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1272.867759] env[69027]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1272.867759] env[69027]: ERROR oslo_messaging.rpc.server [ 1273.106121] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1273.107153] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1273.107535] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1273.107817] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1273.108148] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1273.108544] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1273.108711] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1273.108963] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1273.109160] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1273.109573] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Got 1 
possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1273.109840] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1273.110066] env[69027]: DEBUG nova.virt.hardware [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1273.111458] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e071f6-a138-4650-a7dd-6e03d047b560 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.121343] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c15a16f-fdbe-461c-b980-1c1e14fbd479 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.409638] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Successfully created port: fe4d1a2d-8552-459e-af4d-5200bb756718 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1273.850502] env[69027]: DEBUG nova.compute.manager [req-381ed0c6-c8e2-471e-9e26-c42e094f274f req-ea3e72c9-643a-4083-87e5-4b25b3b49125 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Received event network-vif-plugged-0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1273.850719] env[69027]: DEBUG oslo_concurrency.lockutils [req-381ed0c6-c8e2-471e-9e26-c42e094f274f req-ea3e72c9-643a-4083-87e5-4b25b3b49125 service nova] Acquiring lock "edc3a0ff-c592-47b8-9753-1b4831bee576-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1273.850926] env[69027]: DEBUG oslo_concurrency.lockutils [req-381ed0c6-c8e2-471e-9e26-c42e094f274f req-ea3e72c9-643a-4083-87e5-4b25b3b49125 service nova] Lock "edc3a0ff-c592-47b8-9753-1b4831bee576-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1273.851108] env[69027]: DEBUG oslo_concurrency.lockutils [req-381ed0c6-c8e2-471e-9e26-c42e094f274f req-ea3e72c9-643a-4083-87e5-4b25b3b49125 service nova] Lock "edc3a0ff-c592-47b8-9753-1b4831bee576-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1273.851280] env[69027]: DEBUG nova.compute.manager [req-381ed0c6-c8e2-471e-9e26-c42e094f274f req-ea3e72c9-643a-4083-87e5-4b25b3b49125 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] No 
waiting events found dispatching network-vif-plugged-0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1273.851445] env[69027]: WARNING nova.compute.manager [req-381ed0c6-c8e2-471e-9e26-c42e094f274f req-ea3e72c9-643a-4083-87e5-4b25b3b49125 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Received unexpected event network-vif-plugged-0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8 for instance with vm_state building and task_state spawning. [ 1273.958590] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Successfully updated port: 0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1273.968567] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "refresh_cache-edc3a0ff-c592-47b8-9753-1b4831bee576" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.968927] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquired lock "refresh_cache-edc3a0ff-c592-47b8-9753-1b4831bee576" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.968927] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1274.047468] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1274.636688] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Updating instance_info_cache with network_info: [{"id": "0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8", "address": "fa:16:3e:8b:35:ce", "network": {"id": "af58bbbf-2658-4d0a-9cd5-61a65bac8053", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2030352835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94997c90fd1047dfb0959103e037feb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e1dd50a-2c", "ovs_interfaceid": "0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.648540] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Releasing lock "refresh_cache-edc3a0ff-c592-47b8-9753-1b4831bee576" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1274.648819] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Instance network_info: |[{"id": "0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8", "address": "fa:16:3e:8b:35:ce", "network": {"id": "af58bbbf-2658-4d0a-9cd5-61a65bac8053", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2030352835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94997c90fd1047dfb0959103e037feb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e1dd50a-2c", "ovs_interfaceid": "0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1274.649235] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:35:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11032cc2-b275-48d2-9c40-9455ea7d49e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1274.656728] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Creating folder: Project (94997c90fd1047dfb0959103e037feb5). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1274.657261] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-195040a2-5baf-4a9a-b6c5-37bc0c3dd475 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.666973] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Created folder: Project (94997c90fd1047dfb0959103e037feb5) in parent group-v677321. [ 1274.667174] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Creating folder: Instances. Parent ref: group-v677387. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1274.667402] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f4fb45e-fc9a-4563-b9ff-d0356ba65be9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.678704] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Created folder: Instances in parent group-v677387. [ 1274.678957] env[69027]: DEBUG oslo.service.loopingcall [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1274.679163] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1274.679372] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9eb200cb-8117-4656-bc1f-26b16fa833dd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.698565] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1274.698565] env[69027]: value = "task-3395174" [ 1274.698565] env[69027]: _type = "Task" [ 1274.698565] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.706231] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395174, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.761596] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Successfully updated port: fe4d1a2d-8552-459e-af4d-5200bb756718 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1274.773443] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Acquiring lock "refresh_cache-54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1274.773652] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Acquired lock "refresh_cache-54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1274.773834] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1274.826449] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1274.998493] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Updating instance_info_cache with network_info: [{"id": "fe4d1a2d-8552-459e-af4d-5200bb756718", "address": "fa:16:3e:ee:f5:cf", "network": {"id": "eb906e73-2ac6-461c-bb11-9db440f22293", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-674979554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0445b99712b34789b0cb5a8e8b4b11d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe4d1a2d-85", "ovs_interfaceid": "fe4d1a2d-8552-459e-af4d-5200bb756718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1275.011501] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Releasing lock "refresh_cache-54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.011792] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Instance network_info: |[{"id": "fe4d1a2d-8552-459e-af4d-5200bb756718", "address": "fa:16:3e:ee:f5:cf", "network": {"id": "eb906e73-2ac6-461c-bb11-9db440f22293", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-674979554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0445b99712b34789b0cb5a8e8b4b11d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe4d1a2d-85", "ovs_interfaceid": "fe4d1a2d-8552-459e-af4d-5200bb756718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1275.012332] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:f5:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe4d1a2d-8552-459e-af4d-5200bb756718', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1275.020586] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Creating folder: Project (0445b99712b34789b0cb5a8e8b4b11d8). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1275.020789] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96ed3484-503a-4a3b-8f2a-9e7d8eee8bc4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.034119] env[69027]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1275.034312] env[69027]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69027) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1275.034638] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Folder already exists: Project (0445b99712b34789b0cb5a8e8b4b11d8). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1275.034836] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Creating folder: Instances. Parent ref: group-v677375. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1275.035076] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c09d5daa-70e3-4575-aae9-837d42218b13 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.044427] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Created folder: Instances in parent group-v677375. [ 1275.044653] env[69027]: DEBUG oslo.service.loopingcall [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1275.044836] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54340994-037e-4255-b32b-18d8784733c3] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1275.045045] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-133ae45b-5a63-4352-871f-1b0e8131700e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.064907] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1275.064907] env[69027]: value = "task-3395177" [ 1275.064907] env[69027]: _type = "Task" [ 1275.064907] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.072181] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395177, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.208595] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395174, 'name': CreateVM_Task, 'duration_secs': 0.299239} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.208769] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1275.209481] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.209650] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.209988] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1275.210260] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-800e1763-9c2b-4f47-a8e4-ee7daef5ea38 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.214816] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Waiting for the task: (returnval){ [ 1275.214816] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]529a273e-0b04-3001-fb7b-786837ccb9e9" [ 1275.214816] env[69027]: _type = "Task" 
[ 1275.214816] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.222434] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]529a273e-0b04-3001-fb7b-786837ccb9e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.266207] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "edc3a0ff-c592-47b8-9753-1b4831bee576" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.575681] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395177, 'name': CreateVM_Task, 'duration_secs': 0.282392} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.575858] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54340994-037e-4255-b32b-18d8784733c3] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1275.576560] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'boot_index': 0, 'disk_bus': None, 'delete_on_termination': True, 'device_type': None, 'guest_format': None, 'attachment_id': '31303b94-7eb5-4b13-ab47-c3da898e4ab6', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-677378', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'name': 'volume-95843e00-538d-48b4-a36f-b79ea086f08c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54340994-037e-4255-b32b-18d8784733c3', 'attached_at': '', 'detached_at': '', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'serial': '95843e00-538d-48b4-a36f-b79ea086f08c'}, 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=69027) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1275.576791] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Root volume attach. 
Driver type: vmdk {{(pid=69027) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1275.577572] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc70e94-30e7-4cac-b7ae-c58f6872d8b8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.585644] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4858dc38-71c2-4015-a337-8d266edc92bf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.591532] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688164c7-b419-49d6-acfe-e5c792dddd67 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.597800] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-fd10c675-e66d-4ba9-a753-ca517197f66c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.605637] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1275.605637] env[69027]: value = "task-3395178" [ 1275.605637] env[69027]: _type = "Task" [ 1275.605637] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.612784] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395178, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.725351] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1275.725700] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1275.725897] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.885313] env[69027]: DEBUG nova.compute.manager [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Received event network-changed-0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1275.885574] env[69027]: DEBUG nova.compute.manager [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Refreshing instance network info cache due to event network-changed-0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1275.885836] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Acquiring lock "refresh_cache-edc3a0ff-c592-47b8-9753-1b4831bee576" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1275.886089] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Acquired lock "refresh_cache-edc3a0ff-c592-47b8-9753-1b4831bee576" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.886222] env[69027]: DEBUG nova.network.neutron [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Refreshing network info cache for port 0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1276.119653] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395178, 'name': RelocateVM_Task, 'duration_secs': 0.34207} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.122990] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Volume attach. Driver type: vmdk {{(pid=69027) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1276.123311] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-677378', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'name': 'volume-95843e00-538d-48b4-a36f-b79ea086f08c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54340994-037e-4255-b32b-18d8784733c3', 'attached_at': '', 'detached_at': '', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'serial': '95843e00-538d-48b4-a36f-b79ea086f08c'} {{(pid=69027) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1276.124307] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2b55d3-a39f-4f74-8c33-636a3456c8b5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.142901] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e297f89-40cc-458f-82c0-3ef2e2e17df8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.166234] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] volume-95843e00-538d-48b4-a36f-b79ea086f08c/volume-95843e00-538d-48b4-a36f-b79ea086f08c.vmdk or device None with type thin {{(pid=69027) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1276.169228] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5460e1c5-5826-46c1-998b-fcd2e29662a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.188431] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1276.188431] env[69027]: value = "task-3395179" [ 1276.188431] env[69027]: _type = "Task" [ 1276.188431] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.196684] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395179, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.279038] env[69027]: DEBUG nova.network.neutron [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Updated VIF entry in instance network info cache for port 0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1276.279665] env[69027]: DEBUG nova.network.neutron [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Updating instance_info_cache with network_info: [{"id": "0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8", "address": "fa:16:3e:8b:35:ce", "network": {"id": "af58bbbf-2658-4d0a-9cd5-61a65bac8053", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-2030352835-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94997c90fd1047dfb0959103e037feb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e1dd50a-2c", "ovs_interfaceid": "0e1dd50a-2ce1-432e-b153-2c5e8a4fc9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.291783] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Releasing lock "refresh_cache-edc3a0ff-c592-47b8-9753-1b4831bee576" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.292146] env[69027]: DEBUG nova.compute.manager [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Received event network-vif-plugged-fe4d1a2d-8552-459e-af4d-5200bb756718 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1276.292389] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Acquiring lock "54340994-037e-4255-b32b-18d8784733c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.292626] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Lock "54340994-037e-4255-b32b-18d8784733c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.292823] env[69027]: DEBUG 
oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Lock "54340994-037e-4255-b32b-18d8784733c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.293040] env[69027]: DEBUG nova.compute.manager [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] No waiting events found dispatching network-vif-plugged-fe4d1a2d-8552-459e-af4d-5200bb756718 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1276.293244] env[69027]: WARNING nova.compute.manager [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Received unexpected event network-vif-plugged-fe4d1a2d-8552-459e-af4d-5200bb756718 for instance with vm_state building and task_state spawning. [ 1276.293441] env[69027]: DEBUG nova.compute.manager [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Received event network-changed-fe4d1a2d-8552-459e-af4d-5200bb756718 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1276.293630] env[69027]: DEBUG nova.compute.manager [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Refreshing instance network info cache due to event network-changed-fe4d1a2d-8552-459e-af4d-5200bb756718. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1276.293843] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Acquiring lock "refresh_cache-54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.293993] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Acquired lock "refresh_cache-54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.294281] env[69027]: DEBUG nova.network.neutron [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Refreshing network info cache for port fe4d1a2d-8552-459e-af4d-5200bb756718 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1276.654902] env[69027]: DEBUG nova.network.neutron [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Updated VIF entry in instance network info cache for port fe4d1a2d-8552-459e-af4d-5200bb756718. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1276.655658] env[69027]: DEBUG nova.network.neutron [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Updating instance_info_cache with network_info: [{"id": "fe4d1a2d-8552-459e-af4d-5200bb756718", "address": "fa:16:3e:ee:f5:cf", "network": {"id": "eb906e73-2ac6-461c-bb11-9db440f22293", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-674979554-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0445b99712b34789b0cb5a8e8b4b11d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe4d1a2d-85", "ovs_interfaceid": "fe4d1a2d-8552-459e-af4d-5200bb756718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.665826] env[69027]: DEBUG oslo_concurrency.lockutils [req-539751a0-b027-48f0-806e-14562fbcc682 req-0e813b50-ad5a-47c9-ae2a-32aee72690e4 service nova] Releasing lock "refresh_cache-54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.699687] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395179, 'name': ReconfigVM_Task, 'duration_secs': 0.268948} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.699981] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Reconfigured VM instance instance-0000004b to attach disk [datastore2] volume-95843e00-538d-48b4-a36f-b79ea086f08c/volume-95843e00-538d-48b4-a36f-b79ea086f08c.vmdk or device None with type thin {{(pid=69027) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1276.705261] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-425b155d-bc2a-4065-8d03-1f42a76ff221 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.720878] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1276.720878] env[69027]: value = "task-3395180" [ 1276.720878] env[69027]: _type = "Task" [ 1276.720878] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.729501] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395180, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.232050] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395180, 'name': ReconfigVM_Task, 'duration_secs': 0.139163} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.232050] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-677378', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'name': 'volume-95843e00-538d-48b4-a36f-b79ea086f08c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54340994-037e-4255-b32b-18d8784733c3', 'attached_at': '', 'detached_at': '', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'serial': '95843e00-538d-48b4-a36f-b79ea086f08c'} {{(pid=69027) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1277.232325] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b82e2862-0fce-40be-be75-a196fb997c52 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.239856] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1277.239856] env[69027]: value = "task-3395181" [ 1277.239856] env[69027]: _type = "Task" [ 1277.239856] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.248523] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395181, 'name': Rename_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.749907] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395181, 'name': Rename_Task, 'duration_secs': 0.14042} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.750260] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Powering on the VM {{(pid=69027) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1277.750520] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7948288-d2dc-41e2-94b4-af42c58df8ef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.757230] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1277.757230] env[69027]: value = "task-3395182" [ 1277.757230] env[69027]: _type = "Task" [ 1277.757230] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.765563] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.048480] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Acquiring lock "54340994-037e-4255-b32b-18d8784733c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.267857] env[69027]: DEBUG oslo_vmware.api [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395182, 'name': PowerOnVM_Task, 'duration_secs': 0.44084} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.268131] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Powered on the VM {{(pid=69027) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1278.268333] env[69027]: INFO nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Took 5.16 seconds to spawn the instance on the hypervisor. 
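Note: the recurring "Waiting for the task: (returnval){ value = "task-NNNNNNN" ... } to complete" / "progress is N%" / "completed successfully" entries above come from oslo.vmware's wait_for_task/_poll_task loop (the file paths are visible in the log lines themselves). The sketch below is a hypothetical, simplified illustration of that polling pattern, not the actual oslo.vmware implementation; the 0.5 s interval and the get_task_info callable are assumptions made for the example.

```python
import time

# Illustrative sketch only -- mimics the poll-until-done pattern the log shows for
# CreateVM_Task, RelocateVM_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task.
# All names here are hypothetical stand-ins, not oslo.vmware internals.

POLL_INTERVAL = 0.5  # assumed polling interval; the real one is configurable


def wait_for_task(get_task_info, timeout=300.0):
    """Poll a vCenter-style task until it completes, raising on error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        print(f"Task: {info['id']} ({info['name']}) progress is {info.get('progress', 0)}%")
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"Task {info['id']} failed: {info.get('error')}")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError("task did not complete in time")


# Fake task that completes on the second poll, just to exercise the loop.
_states = iter([
    {"id": "task-1", "name": "CreateVM_Task", "state": "running", "progress": 0},
    {"id": "task-1", "name": "CreateVM_Task", "state": "success", "progress": 100},
])
wait_for_task(lambda: next(_states))
```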
[ 1278.268585] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Checking state {{(pid=69027) _get_power_state /opt/stack/nova/nova/compute/manager.py:1782}} [ 1278.269361] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd903ba7-2c7d-4b41-af34-41313229d4db {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.310301] env[69027]: DEBUG nova.compute.utils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Conflict updating instance 54340994-037e-4255-b32b-18d8784733c3. Expected: {'task_state': ['spawning']}. Actual: {'task_state': 'deleting'} {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1278.312421] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Instance disappeared during build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1278.312616] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1278.312807] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1278.312984] env[69027]: DEBUG nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1278.313165] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1278.626855] env[69027]: DEBUG nova.network.neutron [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.638751] env[69027]: DEBUG nova.compute.manager [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Received event network-vif-deleted-fe4d1a2d-8552-459e-af4d-5200bb756718 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1278.639031] env[69027]: INFO nova.compute.manager [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Neutron deleted interface fe4d1a2d-8552-459e-af4d-5200bb756718; detaching it from the instance and deleting it from the info cache [ 1278.639228] env[69027]: DEBUG nova.network.neutron [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.647090] env[69027]: INFO nova.compute.manager [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Took 0.33 seconds to deallocate network for instance. 
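Note: the "Conflict updating instance 54340994-037e-4255-b32b-18d8784733c3. Expected: {'task_state': ['spawning']}. Actual: {'task_state': 'deleting'}" entry above is the signature of a build/delete race: the spawn finished while a concurrent terminate request had already moved the instance's task_state to 'deleting', so the conditional save failed and the build path fell through to "Instance disappeared during build" and network deallocation. The following is a toy compare-and-swap sketch of that expected-state check; InstanceStore, UnexpectedTaskState and the in-memory dict are hypothetical stand-ins, not Nova's actual objects.

```python
import threading


class UnexpectedTaskState(Exception):
    pass


class InstanceStore:
    """Toy stand-in for the instance record; only tracks task_state."""

    def __init__(self):
        self._lock = threading.Lock()
        self._task_state = {}  # instance uuid -> current task_state

    def save(self, uuid, new_state, expected_state):
        # The update succeeds only if the stored task_state still matches what
        # the caller expects; otherwise the concurrent change is surfaced.
        with self._lock:
            current = self._task_state.get(uuid)
            if current != expected_state:
                raise UnexpectedTaskState(
                    f"Conflict updating instance {uuid}. "
                    f"Expected: {{'task_state': ['{expected_state}']}}. "
                    f"Actual: {{'task_state': '{current}'}}")
            self._task_state[uuid] = new_state


store = InstanceStore()
uuid = "54340994-037e-4255-b32b-18d8784733c3"
store._task_state[uuid] = "deleting"          # a delete request won the race
try:
    store.save(uuid, new_state=None, expected_state="spawning")
except UnexpectedTaskState as exc:
    print(exc)  # the build path then deallocates networking, as in the log
```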
[ 1278.654332] env[69027]: DEBUG oslo_concurrency.lockutils [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] Acquiring lock "54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.746261] env[69027]: INFO nova.scheduler.client.report [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Deleted allocations for instance 54340994-037e-4255-b32b-18d8784733c3 [ 1278.746557] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8068e968-3d9f-46b1-a372-ce7ae1317b93 tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "54340994-037e-4255-b32b-18d8784733c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.727s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.747803] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "54340994-037e-4255-b32b-18d8784733c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.699s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.748769] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Acquiring lock "54340994-037e-4255-b32b-18d8784733c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.748769] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "54340994-037e-4255-b32b-18d8784733c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.748769] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "54340994-037e-4255-b32b-18d8784733c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.754227] env[69027]: INFO nova.compute.manager [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Terminating instance [ 1278.761239] env[69027]: DEBUG nova.compute.manager [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1278.761239] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Powering off the VM {{(pid=69027) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1278.761239] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c70ed2ac-da73-4b1f-93f5-961e7cdde092 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.762420] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1278.770684] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1278.770684] env[69027]: value = "task-3395183" [ 1278.770684] env[69027]: _type = "Task" [ 1278.770684] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.780103] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395183, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.821057] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.821309] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.822969] env[69027]: INFO nova.compute.claims [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1279.110871] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58904b2f-bc38-4b94-9f45-973d362b0a91 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.118018] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfef60d5-8faf-48b7-b74f-3f3e01aea689 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.150429] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d366aa4-cda6-4412-9ac5-14d45bda2160 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.157691] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbaac6fd-e74f-429a-885c-bec87059da43 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.171426] env[69027]: DEBUG nova.compute.provider_tree [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.183517] env[69027]: DEBUG nova.scheduler.client.report [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1279.200335] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 
tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.379s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.200814] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1279.239819] env[69027]: DEBUG nova.compute.utils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1279.241257] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1279.241369] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1279.251282] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1279.282644] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395183, 'name': PowerOffVM_Task, 'duration_secs': 0.195276} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.282951] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Powered off the VM {{(pid=69027) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1279.283184] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Volume detach. 
Driver type: vmdk {{(pid=69027) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1279.283386] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-677378', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'name': 'volume-95843e00-538d-48b4-a36f-b79ea086f08c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54340994-037e-4255-b32b-18d8784733c3', 'attached_at': '', 'detached_at': '', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'serial': '95843e00-538d-48b4-a36f-b79ea086f08c'} {{(pid=69027) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1279.284210] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3f6574-d30c-4222-b607-45088f048395 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.305392] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f05ee6-dafa-47a1-89d7-94bb14643124 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.311156] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46864415-2455-4e1d-a81b-fe2245760019 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.315552] env[69027]: DEBUG nova.policy [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9cae5394177466e9afb1f8fa26e15ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ed36a72c2994c47a7313f7bbb37640a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1279.331925] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1279.333494] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ddb178-8d00-4656-bff7-2610e92dfb6f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.351083] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] The volume has not been displaced from its original location: [datastore2] volume-95843e00-538d-48b4-a36f-b79ea086f08c/volume-95843e00-538d-48b4-a36f-b79ea086f08c.vmdk. 
No consolidation needed. {{(pid=69027) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1279.356836] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Reconfiguring VM instance instance-0000004b to detach disk 2000 {{(pid=69027) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1279.358804] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-139a1b25-2264-4752-a250-88c29b54ec52 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.378574] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1279.378810] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1279.378983] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.379171] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1279.379309] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.379459] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1279.379665] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 
tempest-ServersTestJSON-2138845674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1279.379828] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1279.379993] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1279.380175] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1279.380351] env[69027]: DEBUG nova.virt.hardware [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1279.381182] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669d423a-6e12-4220-82d1-1ba137703285 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.384997] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1279.384997] env[69027]: value = "task-3395184" [ 1279.384997] env[69027]: _type = "Task" [ 1279.384997] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.391750] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256d2fab-83f6-4fa6-b2bc-598ffab41e4c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.399029] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395184, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.703874] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Successfully created port: d6d381bc-9d2f-4345-aae5-3b91b8080e59 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1279.897158] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395184, 'name': ReconfigVM_Task, 'duration_secs': 0.143637} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.897158] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Reconfigured VM instance instance-0000004b to detach disk 2000 {{(pid=69027) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1279.902791] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c975e8e6-9a0a-4440-8e58-0c6e3b27ee9b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.919543] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1279.919543] env[69027]: value = "task-3395185" [ 1279.919543] env[69027]: _type = "Task" [ 1279.919543] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.928988] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395185, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.004106] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "b930e792-b0a8-45e4-9330-befac22182b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.005661] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "b930e792-b0a8-45e4-9330-befac22182b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.429864] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395185, 'name': ReconfigVM_Task, 'duration_secs': 0.111563} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.430491] env[69027]: DEBUG nova.virt.vmwareapi.volumeops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-677378', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'name': 'volume-95843e00-538d-48b4-a36f-b79ea086f08c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '54340994-037e-4255-b32b-18d8784733c3', 'attached_at': '', 'detached_at': '', 'volume_id': '95843e00-538d-48b4-a36f-b79ea086f08c', 'serial': '95843e00-538d-48b4-a36f-b79ea086f08c'} {{(pid=69027) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1280.430830] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1280.431627] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9196d4c8-c1f2-4a40-b1b3-845014dacb28 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.438564] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1280.438969] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2907f52-4398-4cc6-a649-f21d9e71473d {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.499653] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1280.499885] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1280.500087] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Deleting the datastore file [datastore2] 54340994-037e-4255-b32b-18d8784733c3 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1280.500357] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4194f2ee-39df-4807-a9a8-c8910668e1b5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.510092] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for the task: (returnval){ [ 1280.510092] env[69027]: value = "task-3395187" [ 1280.510092] env[69027]: _type = "Task" [ 1280.510092] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.519185] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395187, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.662969] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Successfully updated port: d6d381bc-9d2f-4345-aae5-3b91b8080e59 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1280.677879] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "refresh_cache-1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1280.678050] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "refresh_cache-1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.678240] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1280.717492] env[69027]: DEBUG nova.compute.manager [req-af9a7f70-cfbc-4026-8b8e-bed7a6fb5c69 req-59dd6301-7816-400c-aece-2725325592a0 service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Received event network-vif-plugged-d6d381bc-9d2f-4345-aae5-3b91b8080e59 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1280.717721] env[69027]: DEBUG oslo_concurrency.lockutils [req-af9a7f70-cfbc-4026-8b8e-bed7a6fb5c69 req-59dd6301-7816-400c-aece-2725325592a0 service nova] Acquiring lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.717998] env[69027]: DEBUG oslo_concurrency.lockutils [req-af9a7f70-cfbc-4026-8b8e-bed7a6fb5c69 req-59dd6301-7816-400c-aece-2725325592a0 service nova] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.718205] env[69027]: DEBUG oslo_concurrency.lockutils [req-af9a7f70-cfbc-4026-8b8e-bed7a6fb5c69 req-59dd6301-7816-400c-aece-2725325592a0 service nova] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1280.718378] env[69027]: DEBUG nova.compute.manager [req-af9a7f70-cfbc-4026-8b8e-bed7a6fb5c69 req-59dd6301-7816-400c-aece-2725325592a0 service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] No waiting events found dispatching network-vif-plugged-d6d381bc-9d2f-4345-aae5-3b91b8080e59 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1280.718544] env[69027]: WARNING 
nova.compute.manager [req-af9a7f70-cfbc-4026-8b8e-bed7a6fb5c69 req-59dd6301-7816-400c-aece-2725325592a0 service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Received unexpected event network-vif-plugged-d6d381bc-9d2f-4345-aae5-3b91b8080e59 for instance with vm_state building and task_state spawning. [ 1280.751272] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1280.916506] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Updating instance_info_cache with network_info: [{"id": "d6d381bc-9d2f-4345-aae5-3b91b8080e59", "address": "fa:16:3e:d7:a8:9b", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d381bc-9d", "ovs_interfaceid": "d6d381bc-9d2f-4345-aae5-3b91b8080e59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.927909] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "refresh_cache-1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.928221] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Instance network_info: |[{"id": "d6d381bc-9d2f-4345-aae5-3b91b8080e59", "address": "fa:16:3e:d7:a8:9b", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d381bc-9d", "ovs_interfaceid": "d6d381bc-9d2f-4345-aae5-3b91b8080e59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1280.928627] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:a8:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6d381bc-9d2f-4345-aae5-3b91b8080e59', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.936069] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating folder: Project (4ed36a72c2994c47a7313f7bbb37640a). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.936574] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8bc8cc15-5edc-4b5c-af59-6c67b3ee4b1c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.947194] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created folder: Project (4ed36a72c2994c47a7313f7bbb37640a) in parent group-v677321. [ 1280.947374] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating folder: Instances. Parent ref: group-v677392. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.947592] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1820e791-3fb0-4981-b75b-41e3ac549c78 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.956589] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created folder: Instances in parent group-v677392. [ 1280.956808] env[69027]: DEBUG oslo.service.loopingcall [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.956980] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1280.957186] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce4a26be-1cc6-4f81-9c04-4f545d81f266 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.976124] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.976124] env[69027]: value = "task-3395190" [ 1280.976124] env[69027]: _type = "Task" [ 1280.976124] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.983673] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395190, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.017892] env[69027]: DEBUG oslo_vmware.api [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Task: {'id': task-3395187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080957} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.018158] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1281.018344] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1281.018522] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1281.018693] env[69027]: INFO nova.compute.manager [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Took 2.26 seconds to destroy the instance on the hypervisor. [ 1281.018929] env[69027]: DEBUG oslo.service.loopingcall [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1281.019131] env[69027]: DEBUG nova.compute.manager [-] [instance: 54340994-037e-4255-b32b-18d8784733c3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1281.019231] env[69027]: DEBUG nova.network.neutron [-] [instance: 54340994-037e-4255-b32b-18d8784733c3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1281.049745] env[69027]: DEBUG nova.network.neutron [-] [instance: 54340994-037e-4255-b32b-18d8784733c3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.059286] env[69027]: INFO nova.compute.manager [-] [instance: 54340994-037e-4255-b32b-18d8784733c3] Took 0.04 seconds to deallocate network for instance. [ 1281.126966] env[69027]: INFO nova.compute.manager [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Took 0.07 seconds to detach 1 volumes for instance. [ 1281.127940] env[69027]: DEBUG nova.compute.manager [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] [instance: 54340994-037e-4255-b32b-18d8784733c3] Deleting volume: 95843e00-538d-48b4-a36f-b79ea086f08c {{(pid=69027) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3238}} [ 1281.204043] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.204575] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.204617] env[69027]: DEBUG nova.objects.instance [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lazy-loading 'resources' on Instance uuid 54340994-037e-4255-b32b-18d8784733c3 {{(pid=69027) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1281.419315] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d528c09-7e3c-4a36-a252-3c41a0fb4591 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.426957] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf288811-ebe1-4218-8f87-b2d4cc0f930b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.458212] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8975c831-2e64-4326-82e1-238c02bea89f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.465062] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c15366-0fce-48d5-851e-02a489a14a36 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.478119] env[69027]: DEBUG nova.compute.provider_tree [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.486956] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395190, 'name': CreateVM_Task, 'duration_secs': 0.314424} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.487681] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1281.488435] env[69027]: DEBUG nova.scheduler.client.report [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1281.491613] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.492516] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.492516] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1281.493420] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb549688-85c4-4fb1-9af5-11e0c6398c0d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.497086] env[69027]: DEBUG 
oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 1281.497086] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]524c5da1-5773-0b87-8bcb-3f634bdcfab6" [ 1281.497086] env[69027]: _type = "Task" [ 1281.497086] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.505046] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]524c5da1-5773-0b87-8bcb-3f634bdcfab6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.506190] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.302s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.569318] env[69027]: DEBUG oslo_concurrency.lockutils [None req-4277c291-dc0d-407d-afa8-072b794a0c0a tempest-ServerActionsV293TestJSON-252408414 tempest-ServerActionsV293TestJSON-252408414-project-member] Lock "54340994-037e-4255-b32b-18d8784733c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.821s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1281.570395] env[69027]: DEBUG oslo_concurrency.lockutils [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] Acquired lock "54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.571498] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc3baf6-729b-46ce-a8eb-05f540b29c84 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.579603] env[69027]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1281.579773] env[69027]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=69027) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1281.580403] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d327f9ba-d95c-4471-a35a-72ab8f62c09f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.589215] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15b6147-6f07-4b47-a35d-e21a3f66b35c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.619587] env[69027]: ERROR root [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-677391' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-677391' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-677391' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-677391'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-677391' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-677391' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-677391'}\n"]: nova.exception.InstanceNotFound: Instance 54340994-037e-4255-b32b-18d8784733c3 could not be found. [ 1281.619805] env[69027]: DEBUG oslo_concurrency.lockutils [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] Releasing lock "54340994-037e-4255-b32b-18d8784733c3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.620050] env[69027]: DEBUG nova.compute.manager [req-e0bb6069-42e5-43e3-9b58-f3af141194c8 req-c015b6b7-52e0-44c8-a42c-6848fedba047 service nova] [instance: 54340994-037e-4255-b32b-18d8784733c3] Detach interface failed, port_id=fe4d1a2d-8552-459e-af4d-5200bb756718, reason: Instance 54340994-037e-4255-b32b-18d8784733c3 could not be found. 
{{(pid=69027) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10956}} [ 1282.007646] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1282.007972] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1282.008056] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.749266] env[69027]: DEBUG nova.compute.manager [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Received event network-changed-d6d381bc-9d2f-4345-aae5-3b91b8080e59 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1282.749974] env[69027]: DEBUG nova.compute.manager [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Refreshing instance network info cache due to event network-changed-d6d381bc-9d2f-4345-aae5-3b91b8080e59. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1282.749974] env[69027]: DEBUG oslo_concurrency.lockutils [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] Acquiring lock "refresh_cache-1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.749974] env[69027]: DEBUG oslo_concurrency.lockutils [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] Acquired lock "refresh_cache-1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.749974] env[69027]: DEBUG nova.network.neutron [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Refreshing network info cache for port d6d381bc-9d2f-4345-aae5-3b91b8080e59 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1283.079831] env[69027]: DEBUG nova.network.neutron [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Updated VIF entry in instance network info cache for port d6d381bc-9d2f-4345-aae5-3b91b8080e59. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1283.080212] env[69027]: DEBUG nova.network.neutron [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Updating instance_info_cache with network_info: [{"id": "d6d381bc-9d2f-4345-aae5-3b91b8080e59", "address": "fa:16:3e:d7:a8:9b", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6d381bc-9d", "ovs_interfaceid": "d6d381bc-9d2f-4345-aae5-3b91b8080e59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.090834] env[69027]: DEBUG oslo_concurrency.lockutils [req-190c97fa-22c9-4276-8fdb-e5a4407b91a6 req-0b9659bc-65d7-4599-80f5-df4990d2683f service nova] Releasing lock "refresh_cache-1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1288.920687] env[69027]: DEBUG oslo_concurrency.lockutils [None req-3f8ced9e-1cdd-487e-91f8-81d81e8e4d95 tempest-ServerRescueTestJSON-1099885945 tempest-ServerRescueTestJSON-1099885945-project-member] Acquiring lock "fc2ddcd1-d7cf-45d3-903f-247b00f48f2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1288.922444] env[69027]: DEBUG oslo_concurrency.lockutils [None req-3f8ced9e-1cdd-487e-91f8-81d81e8e4d95 tempest-ServerRescueTestJSON-1099885945 tempest-ServerRescueTestJSON-1099885945-project-member] Lock "fc2ddcd1-d7cf-45d3-903f-247b00f48f2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.769853] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.771883] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1316.783823] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.784067] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.784247] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.784406] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1316.785548] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fc700b-f824-4164-9188-ed701def5c39 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.794412] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f433853-201a-4ff9-9ed9-6d8fd7ba238b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.808793] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5b9a41-c5cd-4ad1-acec-41205c162d79 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.815134] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac95b91-9f55-4aa8-8b80-d5d03b83b597 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.847081] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180977MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1316.847245] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1316.847451] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1316.929647] env[69027]: DEBUG nova.compute.resource_tracker [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.929813] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.929939] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.930072] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.930193] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.930309] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.930422] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.930533] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.930644] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.930754] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1316.941542] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1316.951869] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1316.963493] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1316.973730] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3054e7f1-4a02-47ca-91fd-4d8669004e8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1316.983991] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c6a011b8-3c47-4e37-a9f1-e36a546048ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1316.994335] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bfe81ca7-70dc-4e48-9f8b-afa901baec0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.004112] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.013826] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fc2ddcd1-d7cf-45d3-903f-247b00f48f2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1317.014073] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1317.014221] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1317.212799] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba859d86-4f1b-4653-8470-b2c860d106e7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.220658] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed240dfa-69a8-457b-b1fc-5139d4f0dcea {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.249871] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a022afc2-bf98-4ef5-ba60-e6c4777ef299 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.256862] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77704394-784d-464c-9630-4f8bebdd7a9d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.270212] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1317.279099] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1317.294428] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1317.294648] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.447s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.469700] env[69027]: WARNING oslo_vmware.rw_handles [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1317.469700] env[69027]: ERROR oslo_vmware.rw_handles [ 1317.470082] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1317.472168] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1317.472425] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 
tempest-ListImageFiltersTestJSON-1244799674-project-member] Copying Virtual Disk [datastore2] vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/682ed62c-f9e5-492d-a858-f6346ed99285/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1317.472722] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c8373c1-b268-4117-97ed-07f297a4952e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.480979] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 1317.480979] env[69027]: value = "task-3395192" [ 1317.480979] env[69027]: _type = "Task" [ 1317.480979] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.488637] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': task-3395192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.991270] env[69027]: DEBUG oslo_vmware.exceptions [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1317.991630] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1317.992083] env[69027]: ERROR nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1317.992083] env[69027]: Faults: ['InvalidArgument'] [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Traceback (most recent call last): [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] yield resources [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self.driver.spawn(context, instance, image_meta, [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self._fetch_image_if_missing(context, vi) [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] image_cache(vi, tmp_image_ds_loc) [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] vm_util.copy_virtual_disk( [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] session._wait_for_task(vmdk_copy_task) [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] return self.wait_for_task(task_ref) [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] return evt.wait() [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] result = hub.switch() [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] return self.greenlet.switch() [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self.f(*self.args, **self.kw) [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] raise exceptions.translate_fault(task_info.error) [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Faults: ['InvalidArgument'] [ 1317.992083] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] [ 1317.992890] env[69027]: INFO nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Terminating instance [ 1317.994206] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.994419] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1317.994711] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e4f31b2a-ef93-487d-9467-563e8cb8c9b5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.997087] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1317.997297] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1317.998047] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75775fba-08f5-4aaf-a77b-d5dd9d0ef221 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.004984] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1318.005218] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f7f2166-f37e-4aac-9e6a-f0e289fd0fea {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.007403] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.007577] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1318.008501] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc991f77-3b10-4980-bc7f-1d0250876e7c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.013160] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 1318.013160] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52afdbf7-3bf3-2721-7e02-a8de6333223d" [ 1318.013160] env[69027]: _type = "Task" [ 1318.013160] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.023450] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52afdbf7-3bf3-2721-7e02-a8de6333223d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.079330] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1318.079558] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1318.079737] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Deleting the datastore file [datastore2] 4ed95b65-233e-406e-8d27-2a5cd2694184 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1318.080022] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0199a6e-259f-4a44-be42-50634612065c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.086553] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 1318.086553] env[69027]: value = "task-3395194" [ 1318.086553] env[69027]: _type = "Task" [ 1318.086553] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.094529] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': task-3395194, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.294498] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1318.294498] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1318.294739] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1318.314792] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.314963] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.315111] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.315243] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.315368] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.315489] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.315633] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.315759] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.315877] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.316030] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1318.316120] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1318.316647] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1318.528049] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1318.528049] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating directory with path [datastore2] vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1318.528049] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b7355e9-dbf9-4651-b6ec-9105fd1052a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.537692] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Created directory with path [datastore2] vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.537936] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Fetch image to [datastore2] vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1318.538254] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] 
vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1318.539281] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fa7888-985b-4cd4-80c9-301ca3b274f5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.547650] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35392d3-be1d-4601-bf03-a6c02e6520b2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.559429] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e87e56-9e3a-453d-961b-d6f8efa2e8ad {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.594635] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9272839-1dc1-49b1-a614-62967577980b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.601687] env[69027]: DEBUG oslo_vmware.api [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': task-3395194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066629} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.603142] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1318.603337] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1318.603513] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1318.603797] env[69027]: INFO nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1318.605491] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c6c58238-bf1c-4523-b69a-0587061bbe9f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.607313] env[69027]: DEBUG nova.compute.claims [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1318.607489] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.607695] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.628696] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1318.682732] env[69027]: DEBUG oslo_vmware.rw_handles [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1318.744145] env[69027]: DEBUG oslo_vmware.rw_handles [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1318.744339] env[69027]: DEBUG oslo_vmware.rw_handles [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1318.898863] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5986db8f-256b-422d-9c03-510cf9c2f7da {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.906472] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ad98d5-6287-488e-8336-1d43fb7f75ef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.935174] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5217aa92-ea06-460c-bf1e-35f10578625d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.942128] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdc0c6c-c5f0-46aa-995c-fc3f222f6ee1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.955775] env[69027]: DEBUG nova.compute.provider_tree [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.965174] env[69027]: DEBUG nova.scheduler.client.report [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1318.978740] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.371s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1318.979285] env[69027]: ERROR nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1318.979285] env[69027]: Faults: ['InvalidArgument'] [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Traceback (most recent call last): [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1318.979285] 
env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self.driver.spawn(context, instance, image_meta, [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self._fetch_image_if_missing(context, vi) [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] image_cache(vi, tmp_image_ds_loc) [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] vm_util.copy_virtual_disk( [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] session._wait_for_task(vmdk_copy_task) [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] return self.wait_for_task(task_ref) [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] return evt.wait() [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] result = hub.switch() [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] return self.greenlet.switch() [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] self.f(*self.args, **self.kw) [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] raise exceptions.translate_fault(task_info.error) [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Faults: ['InvalidArgument'] [ 1318.979285] env[69027]: ERROR nova.compute.manager [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] [ 1318.980226] env[69027]: DEBUG nova.compute.utils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1318.981392] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Build of instance 4ed95b65-233e-406e-8d27-2a5cd2694184 was re-scheduled: A specified parameter was not correct: fileType [ 1318.981392] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1318.981776] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1318.981956] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1318.982189] env[69027]: DEBUG nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1318.982374] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1319.277502] env[69027]: DEBUG nova.network.neutron [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.293012] env[69027]: INFO nova.compute.manager [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Took 0.31 seconds to deallocate network for instance. [ 1319.391727] env[69027]: INFO nova.scheduler.client.report [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Deleted allocations for instance 4ed95b65-233e-406e-8d27-2a5cd2694184 [ 1319.420279] env[69027]: DEBUG oslo_concurrency.lockutils [None req-91912ca7-2d59-4609-8f21-080ab2a1d2e3 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 585.317s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.421478] env[69027]: DEBUG oslo_concurrency.lockutils [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 191.470s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.421699] env[69027]: DEBUG oslo_concurrency.lockutils [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "4ed95b65-233e-406e-8d27-2a5cd2694184-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.421907] env[69027]: DEBUG oslo_concurrency.lockutils [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.422089] env[69027]: DEBUG oslo_concurrency.lockutils [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.425012] env[69027]: INFO nova.compute.manager [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Terminating instance [ 1319.426038] env[69027]: DEBUG nova.compute.manager [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1319.426208] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1319.426873] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8d1a605-0a92-473f-b875-31510e4df8c8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.435565] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1319.441389] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f642fd-6135-46c4-976d-06ba89c55f14 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.471024] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ed95b65-233e-406e-8d27-2a5cd2694184 could not be found. 
[ 1319.471254] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1319.471436] env[69027]: INFO nova.compute.manager [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1319.471684] env[69027]: DEBUG oslo.service.loopingcall [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1319.476105] env[69027]: DEBUG nova.compute.manager [-] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1319.476213] env[69027]: DEBUG nova.network.neutron [-] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1319.487758] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.487992] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.489438] env[69027]: INFO nova.compute.claims [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1319.515594] env[69027]: DEBUG nova.network.neutron [-] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.533803] env[69027]: INFO nova.compute.manager [-] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] Took 0.06 seconds to deallocate network for instance. 
[ 1319.620229] env[69027]: DEBUG oslo_concurrency.lockutils [None req-433bb242-884f-4b9b-8088-417b27c3a161 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.621922] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 177.548s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.622149] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4ed95b65-233e-406e-8d27-2a5cd2694184] During sync_power_state the instance has a pending task (deleting). Skip. [ 1319.622332] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "4ed95b65-233e-406e-8d27-2a5cd2694184" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.736733] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82fb5ba0-968b-4b73-8a9c-e681b652fd85 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.744014] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f3c9af-fafa-417a-aa8a-8288e29750c5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.772600] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1319.773548] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-542edba4-291c-4ec5-bb44-79c95dcfa19e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.780138] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b064d2-d791-4724-ae30-1d33e4df9f50 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.792596] env[69027]: DEBUG nova.compute.provider_tree [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.801063] env[69027]: DEBUG nova.scheduler.client.report [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed for provider 
4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1319.814502] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.814986] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1319.846506] env[69027]: DEBUG nova.compute.utils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1319.848022] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1319.848148] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1319.856812] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1319.904042] env[69027]: DEBUG nova.policy [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64e2a52fd3564ac08c4a4359023ce373', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b751370bba1b4705957897afbd64490c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1319.921593] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1319.945729] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1319.945951] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1319.946128] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1319.946314] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1319.946462] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1319.946610] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1319.946814] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1319.946976] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1319.947157] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1319.947320] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1319.947493] env[69027]: DEBUG nova.virt.hardware [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1319.948378] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8902f2a-8084-4014-9e52-7fe301a734a5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.956370] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b90bbea-3fd3-45c2-9778-daf554fe74a5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.218653] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Successfully created port: e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1321.169663] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Successfully updated port: e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae {{(pid=69027) 
_update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1321.183520] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "refresh_cache-f981fe25-52bd-46e7-920e-1f73ca37d9a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.183688] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired lock "refresh_cache-f981fe25-52bd-46e7-920e-1f73ca37d9a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.183823] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1321.259182] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1321.342102] env[69027]: DEBUG nova.compute.manager [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Received event network-vif-plugged-e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1321.342102] env[69027]: DEBUG oslo_concurrency.lockutils [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] Acquiring lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.342102] env[69027]: DEBUG oslo_concurrency.lockutils [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.342102] env[69027]: DEBUG oslo_concurrency.lockutils [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.342102] env[69027]: DEBUG nova.compute.manager [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] No waiting events found dispatching network-vif-plugged-e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae {{(pid=69027) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1321.342643] env[69027]: WARNING nova.compute.manager [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Received unexpected event network-vif-plugged-e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae for instance with vm_state building and task_state spawning. [ 1321.343014] env[69027]: DEBUG nova.compute.manager [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Received event network-changed-e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1321.343360] env[69027]: DEBUG nova.compute.manager [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Refreshing instance network info cache due to event network-changed-e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1321.343798] env[69027]: DEBUG oslo_concurrency.lockutils [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] Acquiring lock "refresh_cache-f981fe25-52bd-46e7-920e-1f73ca37d9a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.438930] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Updating instance_info_cache with network_info: [{"id": "e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae", "address": "fa:16:3e:04:10:92", "network": {"id": "b0ef06f2-c074-4d90-b63b-6f6f3c5bd860", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1122167395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b751370bba1b4705957897afbd64490c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c2cd53-43", "ovs_interfaceid": "e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.452414] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Releasing lock "refresh_cache-f981fe25-52bd-46e7-920e-1f73ca37d9a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.452866] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab 
tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Instance network_info: |[{"id": "e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae", "address": "fa:16:3e:04:10:92", "network": {"id": "b0ef06f2-c074-4d90-b63b-6f6f3c5bd860", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1122167395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b751370bba1b4705957897afbd64490c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c2cd53-43", "ovs_interfaceid": "e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1321.453235] env[69027]: DEBUG oslo_concurrency.lockutils [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] Acquired lock "refresh_cache-f981fe25-52bd-46e7-920e-1f73ca37d9a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.453426] env[69027]: DEBUG nova.network.neutron [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Refreshing network info cache for port e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1321.454505] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:10:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1321.461981] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating folder: Project (b751370bba1b4705957897afbd64490c). Parent ref: group-v677321. 
{{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1321.465398] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07aa624b-3754-488a-be41-1ba88378fba2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.476572] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Created folder: Project (b751370bba1b4705957897afbd64490c) in parent group-v677321. [ 1321.476762] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating folder: Instances. Parent ref: group-v677395. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1321.477108] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbce9a4c-ff9b-428c-a8a8-43fe4890f934 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.486661] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Created folder: Instances in parent group-v677395. [ 1321.486901] env[69027]: DEBUG oslo.service.loopingcall [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1321.487099] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1321.487301] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-369d4318-7e9f-4a6a-a9a4-10c47b0ce268 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.508309] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1321.508309] env[69027]: value = "task-3395197" [ 1321.508309] env[69027]: _type = "Task" [ 1321.508309] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.516446] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395197, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.772048] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1321.809045] env[69027]: DEBUG nova.network.neutron [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Updated VIF entry in instance network info cache for port e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1321.809503] env[69027]: DEBUG nova.network.neutron [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Updating instance_info_cache with network_info: [{"id": "e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae", "address": "fa:16:3e:04:10:92", "network": {"id": "b0ef06f2-c074-4d90-b63b-6f6f3c5bd860", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1122167395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b751370bba1b4705957897afbd64490c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c2cd53-43", "ovs_interfaceid": "e4c2cd53-43ce-4bd9-ae53-3e5d6a9de6ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.819849] env[69027]: DEBUG oslo_concurrency.lockutils [req-7a00c1e6-a7d8-46d4-9b2d-424103a47a38 req-c7d3e8b1-d646-43e5-a57f-714b23cfe207 service nova] Releasing lock "refresh_cache-f981fe25-52bd-46e7-920e-1f73ca37d9a3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.018367] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395197, 'name': CreateVM_Task, 'duration_secs': 0.292064} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.018551] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1322.019323] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.019487] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.019807] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1322.020066] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4a56317-2eff-4880-a593-463dc3826d50 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.024584] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1322.024584] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5221d4b4-1826-332b-3bc4-2c99ad1b162b" [ 1322.024584] env[69027]: _type = "Task" [ 1322.024584] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.031963] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5221d4b4-1826-332b-3bc4-2c99ad1b162b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.534726] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.535054] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1322.535278] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.771639] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.771965] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1323.772467] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.197255] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.771422] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1364.232912] env[69027]: WARNING oslo_vmware.rw_handles [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1364.232912] env[69027]: ERROR oslo_vmware.rw_handles [ 1364.233526] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1364.235466] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1364.235767] 
env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Copying Virtual Disk [datastore2] vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/c5b72c6a-a540-4af6-a06f-b382617dd08c/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1364.236125] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-baddf112-47a6-4661-97f8-67e3ac6e54d6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.243501] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 1364.243501] env[69027]: value = "task-3395198" [ 1364.243501] env[69027]: _type = "Task" [ 1364.243501] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.251782] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': task-3395198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.753506] env[69027]: DEBUG oslo_vmware.exceptions [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1364.753806] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.754408] env[69027]: ERROR nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1364.754408] env[69027]: Faults: ['InvalidArgument'] [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] yield resources [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.driver.spawn(context, instance, image_meta, [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._fetch_image_if_missing(context, vi) [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] image_cache(vi, tmp_image_ds_loc) [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] vm_util.copy_virtual_disk( [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] session._wait_for_task(vmdk_copy_task) [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.wait_for_task(task_ref) [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return evt.wait() [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] result = hub.switch() [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.greenlet.switch() [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.f(*self.args, **self.kw) [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise exceptions.translate_fault(task_info.error) [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Faults: ['InvalidArgument'] [ 1364.754408] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1364.755296] env[69027]: INFO nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Terminating instance [ 1364.757588] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1364.757788] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1364.758081] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.758289] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1364.759017] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34c935b-3d08-44f2-9db2-52778ebd52a3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.761597] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52bb396d-00a2-4d33-a38a-0713d606c34a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.767759] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1364.767991] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a64522b0-a181-431d-ba18-43d868d67f7b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.770250] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1364.770426] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1364.771374] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82493a3f-33f1-4e72-88b7-46b3d51351fb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.776071] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Waiting for the task: (returnval){ [ 1364.776071] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]524b7ba5-b179-63ff-cd60-0451fa972d0b" [ 1364.776071] env[69027]: _type = "Task" [ 1364.776071] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.783979] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]524b7ba5-b179-63ff-cd60-0451fa972d0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.836142] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1364.836414] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1364.836688] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Deleting the datastore file [datastore2] fbd6a238-1662-4c22-86ab-d31d4bb82734 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1364.836875] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2d6546f-5462-4a3c-aaf3-b1aa2f782fb5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.843434] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for the task: (returnval){ [ 1364.843434] env[69027]: value = "task-3395200" [ 1364.843434] env[69027]: _type = "Task" [ 1364.843434] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.851016] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': task-3395200, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.287031] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1365.287031] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Creating directory with path [datastore2] vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1365.287371] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-922b0731-e86f-441b-81e4-1f239e42d8af {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.298353] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Created directory with path [datastore2] vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1365.298604] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Fetch image to [datastore2] vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1365.298820] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1365.299767] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d6fa7e-f58c-4661-8263-9a85086f348d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.306718] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79a5f40-421b-48a6-877a-cb7414026bbf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.316053] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3e5942-4231-419a-bfcc-1aea27c8962a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.350168] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040c6cc7-128c-47e3-a4e0-1a72ae9bc123 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.357089] env[69027]: DEBUG oslo_vmware.api [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Task: {'id': task-3395200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082596} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.358478] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1365.358673] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1365.358846] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1365.359038] env[69027]: INFO nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Took 0.60 seconds to destroy the instance on the hypervisor. 
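[editor's note] The spawn-failure traceback above shows the task-polling path: the CopyVirtualDisk_Task is waited on, the poller sees the task enter an error state, and the vCenter fault ("A specified parameter was not correct: fileType", faults ['InvalidArgument']) is translated into an exception that propagates out of spawn. As a rough, self-contained illustration of that pattern only (the names FakeTask-style dict, VimFault and poll_task below are made up for this sketch and are not the oslo.vmware API), the flow looks like:

# Minimal sketch of the task-polling pattern visible in the traceback above:
# a vCenter-style task is polled until it reaches a terminal state, and an
# error state is turned into an exception carrying the fault list.
# All names here are illustrative, not oslo.vmware or Nova code.
import time


class VimFault(Exception):
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list


def poll_task(get_task_info, interval=0.5, timeout=30.0):
    """Poll a task-info callable until success, error, or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # Mirror the log: a task error becomes a fault-carrying exception.
            raise VimFault(info["error"]["localizedMessage"],
                           info["error"].get("faults", []))
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")


if __name__ == "__main__":
    failed = {"state": "error",
              "error": {"localizedMessage":
                        "A specified parameter was not correct: fileType",
                        "faults": ["InvalidArgument"]}}
    try:
        poll_task(lambda: failed)
    except VimFault as exc:
        print(exc, exc.fault_list)

The same poll/translate step is what raises the VimFaultException seen both in the "Instance failed to spawn" trace and, later, in the "Failed to build and run instance" trace for the same instance.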
[ 1365.360779] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-87962376-6a83-418a-a6fe-3f5e93714b49 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.362574] env[69027]: DEBUG nova.compute.claims [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1365.362750] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.362972] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.383532] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1365.433911] env[69027]: DEBUG oslo_vmware.rw_handles [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1365.497483] env[69027]: DEBUG oslo_vmware.rw_handles [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1365.497677] env[69027]: DEBUG oslo_vmware.rw_handles [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1365.642807] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f952c89-4381-4b27-b0ca-f9d92bc96c55 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.650929] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a937b88-c260-4f89-b8c4-edfb00d9834a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.680482] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35838136-540c-42e4-9510-e280b0583b12 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.689488] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4982fa-2964-46a9-9ab4-4b6568a1d645 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.702212] env[69027]: DEBUG nova.compute.provider_tree [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.710380] env[69027]: DEBUG nova.scheduler.client.report [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1365.726517] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.727109] env[69027]: ERROR nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1365.727109] env[69027]: Faults: ['InvalidArgument'] [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1365.727109] 
env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.driver.spawn(context, instance, image_meta, [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._fetch_image_if_missing(context, vi) [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] image_cache(vi, tmp_image_ds_loc) [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] vm_util.copy_virtual_disk( [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] session._wait_for_task(vmdk_copy_task) [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.wait_for_task(task_ref) [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return evt.wait() [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] result = hub.switch() [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.greenlet.switch() [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.f(*self.args, **self.kw) [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise exceptions.translate_fault(task_info.error) [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Faults: ['InvalidArgument'] [ 1365.727109] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.728287] env[69027]: DEBUG nova.compute.utils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1365.729688] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Build of instance fbd6a238-1662-4c22-86ab-d31d4bb82734 was re-scheduled: A specified parameter was not correct: fileType [ 1365.729688] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1365.730140] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1365.730363] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1365.730563] env[69027]: DEBUG nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1365.730825] env[69027]: DEBUG nova.network.neutron [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1365.898141] env[69027]: DEBUG neutronclient.v2_0.client [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69027) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1365.899189] env[69027]: ERROR nova.compute.manager [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.driver.spawn(context, instance, image_meta, [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._fetch_image_if_missing(context, vi) [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] image_cache(vi, tmp_image_ds_loc) [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] vm_util.copy_virtual_disk( [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] session._wait_for_task(vmdk_copy_task) [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.wait_for_task(task_ref) [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return evt.wait() [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] result = hub.switch() [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.greenlet.switch() [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.f(*self.args, **self.kw) [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise exceptions.translate_fault(task_info.error) [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Faults: ['InvalidArgument'] [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] During handling of the above exception, another exception occurred: [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._build_and_run_instance(context, instance, image, [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File 
"/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise exception.RescheduledException( [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] nova.exception.RescheduledException: Build of instance fbd6a238-1662-4c22-86ab-d31d4bb82734 was re-scheduled: A specified parameter was not correct: fileType [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Faults: ['InvalidArgument'] [ 1365.899189] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] During handling of the above exception, another exception occurred: [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] exception_handler_v20(status_code, error_body) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise client_exc(message=error_message, [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Neutron server returns request_ids: ['req-aa1c6ad9-d85b-4037-bb15-16e01817ccc6'] [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] During handling of the above exception, another exception occurred: [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._deallocate_network(context, instance, requested_networks) [ 1365.901063] env[69027]: ERROR nova.compute.manager 
[instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.network_api.deallocate_for_instance( [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] data = neutron.list_ports(**search_opts) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.list('ports', self.ports_path, retrieve_all, [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] for r in self._pagination(collection, path, **params): [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] res = self.get(path, params=params) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.retry_request("GET", action, body=body, [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1365.901063] env[69027]: ERROR nova.compute.manager [instance: 
fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.do_request(method, action, body=body, [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._handle_fault_response(status_code, replybody, resp) [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise exception.Unauthorized() [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] nova.exception.Unauthorized: Not authorized. [ 1365.902917] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1365.952047] env[69027]: INFO nova.scheduler.client.report [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Deleted allocations for instance fbd6a238-1662-4c22-86ab-d31d4bb82734 [ 1365.971506] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b50d7bea-e64f-41f6-b614-b2eb1911f3b8 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.480s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.972897] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.535s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.973144] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Acquiring lock "fbd6a238-1662-4c22-86ab-d31d4bb82734-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.973351] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.973520] env[69027]: DEBUG 
oslo_concurrency.lockutils [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.975447] env[69027]: INFO nova.compute.manager [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Terminating instance [ 1365.977087] env[69027]: DEBUG nova.compute.manager [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1365.977287] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1365.977748] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbb24aa7-d0e3-4b55-a8eb-2de1eee93192 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.987021] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb43a70-2eb3-4f30-ae07-103efac70375 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.997921] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1366.017855] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fbd6a238-1662-4c22-86ab-d31d4bb82734 could not be found. [ 1366.018090] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1366.018301] env[69027]: INFO nova.compute.manager [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Took 0.04 seconds to destroy the instance on the hypervisor. 
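[editor's note] The terminate path just above is tolerant of the VM already being gone: the driver's UnregisterVM lookup raises InstanceNotFound, which is logged as "Instance does not exist on backend" and then treated as a successful destroy, so the delete can proceed to network deallocation. A minimal sketch of that idempotent-destroy behaviour (class and exception names below are invented for illustration, not Nova's real code) could be:

# Sketch of the "missing on backend counts as destroyed" behaviour seen above.
import logging

logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger("destroy-sketch")


class InstanceNotFound(Exception):
    pass


class FakeBackend:
    def __init__(self, vms):
        self._vms = set(vms)

    def unregister(self, uuid):
        if uuid not in self._vms:
            raise InstanceNotFound(uuid)
        self._vms.remove(uuid)


def destroy_instance(backend, uuid):
    try:
        backend.unregister(uuid)
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", uuid)
    LOG.info("Instance destroyed: %s", uuid)


if __name__ == "__main__":
    destroy_instance(FakeBackend([]), "fbd6a238-1662-4c22-86ab-d31d4bb82734")

Note that while the hypervisor-side destroy succeeds this way, the records that follow show the network deallocation still failing, because the Neutron client cannot obtain an authorized token.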
[ 1366.018523] env[69027]: DEBUG oslo.service.loopingcall [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.018764] env[69027]: DEBUG nova.compute.manager [-] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1366.018861] env[69027]: DEBUG nova.network.neutron [-] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1366.062220] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.062474] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.063982] env[69027]: INFO nova.compute.claims [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1366.106962] env[69027]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69027) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1366.107230] env[69027]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1366.107738] env[69027]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-f848d60b-caa5-4b7a-8ccb-d0c01d178fc6'] [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1366.107738] env[69027]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1366.107738] env[69027]: ERROR oslo.service.loopingcall [ 1366.109250] env[69027]: ERROR nova.compute.manager [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1366.136564] env[69027]: ERROR nova.compute.manager [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] exception_handler_v20(status_code, error_body) [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise client_exc(message=error_message, [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Neutron server returns request_ids: ['req-f848d60b-caa5-4b7a-8ccb-d0c01d178fc6'] [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] During handling of the above exception, another exception occurred: [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Traceback (most recent call last): [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._delete_instance(context, instance, bdms) [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._shutdown_instance(context, instance, bdms) [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._try_deallocate_network(context, instance, requested_networks) [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] with excutils.save_and_reraise_exception(): [ 1366.136564] env[69027]: ERROR 
nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.force_reraise() [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise self.value [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] _deallocate_network_with_retries() [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return evt.wait() [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] result = hub.switch() [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.greenlet.switch() [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] result = func(*self.args, **self.kw) [ 1366.136564] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] result = f(*args, **kwargs) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._deallocate_network( [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self.network_api.deallocate_for_instance( [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: 
fbd6a238-1662-4c22-86ab-d31d4bb82734] data = neutron.list_ports(**search_opts) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.list('ports', self.ports_path, retrieve_all, [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] for r in self._pagination(collection, path, **params): [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] res = self.get(path, params=params) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.retry_request("GET", action, body=body, [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] return self.do_request(method, action, body=body, [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] ret = obj(*args, **kwargs) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] self._handle_fault_response(status_code, replybody, resp) [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1366.137720] env[69027]: ERROR nova.compute.manager [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] [ 1366.163694] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.165194] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 224.091s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.165382] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] During sync_power_state the instance has a pending task (deleting). Skip. [ 1366.165560] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "fbd6a238-1662-4c22-86ab-d31d4bb82734" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.222131] env[69027]: INFO nova.compute.manager [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] [instance: fbd6a238-1662-4c22-86ab-d31d4bb82734] Successfully reverted task state from None on failure for instance. [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server [None req-6b2db060-0126-4608-8c45-491622a3fd66 tempest-ListImageFiltersTestJSON-1244799674 tempest-ListImageFiltersTestJSON-1244799674-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-f848d60b-caa5-4b7a-8ccb-d0c01d178fc6'] [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1366.225931] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.227448] env[69027]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1366.227448] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1366.228871] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1366.228871] env[69027]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1366.228871] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1366.228871] env[69027]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1366.228871] env[69027]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1366.228871] env[69027]: ERROR oslo_messaging.rpc.server [ 1366.307124] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2c1410-db65-47b9-94ff-f0dd0200a540 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.315109] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f81054-36b3-40b1-acc4-c383a3430f03 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.345835] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96fc2ce-0240-4131-a3c8-9edc946f01bf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.352939] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2351ec52-5c30-4a0f-b6bf-1a713985cf43 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.366136] env[69027]: DEBUG nova.compute.provider_tree [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1366.374802] env[69027]: DEBUG nova.scheduler.client.report [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1366.387935] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.325s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.388428] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1366.420157] env[69027]: DEBUG nova.compute.utils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1366.422344] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1366.422531] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1366.432162] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1366.483565] env[69027]: DEBUG nova.policy [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64e2a52fd3564ac08c4a4359023ce373', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b751370bba1b4705957897afbd64490c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1366.496452] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1366.522355] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=<?>,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-10T13:32:02Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1366.522593] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1366.522796] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1366.523183] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1366.523183] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1366.523351] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1366.523496] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1366.523657] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1366.523823] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1366.524016] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1366.524217] env[69027]: DEBUG nova.virt.hardware [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1366.525061] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab53185-77e6-4136-839d-f6bbcacd298f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.533094] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa605a0-401c-4af5-b2a6-fd92a7c101d3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.777412] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Successfully created port: 42ea5eee-6339-4252-a0d0-9ff18acf804a {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1367.348994] env[69027]: DEBUG nova.compute.manager [req-fa8b2d01-682d-4686-aabf-1e95e2a06909 req-101a361a-a927-4d5d-a5f0-0e9fda5dcdd8 service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Received event network-vif-plugged-42ea5eee-6339-4252-a0d0-9ff18acf804a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1367.349300] env[69027]: DEBUG oslo_concurrency.lockutils [req-fa8b2d01-682d-4686-aabf-1e95e2a06909 req-101a361a-a927-4d5d-a5f0-0e9fda5dcdd8 service nova] Acquiring lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.349440] env[69027]: DEBUG oslo_concurrency.lockutils [req-fa8b2d01-682d-4686-aabf-1e95e2a06909 req-101a361a-a927-4d5d-a5f0-0e9fda5dcdd8 service nova] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.349619] env[69027]: DEBUG oslo_concurrency.lockutils [req-fa8b2d01-682d-4686-aabf-1e95e2a06909 req-101a361a-a927-4d5d-a5f0-0e9fda5dcdd8 service nova] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 
0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.349789] env[69027]: DEBUG nova.compute.manager [req-fa8b2d01-682d-4686-aabf-1e95e2a06909 req-101a361a-a927-4d5d-a5f0-0e9fda5dcdd8 service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] No waiting events found dispatching network-vif-plugged-42ea5eee-6339-4252-a0d0-9ff18acf804a {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1367.350174] env[69027]: WARNING nova.compute.manager [req-fa8b2d01-682d-4686-aabf-1e95e2a06909 req-101a361a-a927-4d5d-a5f0-0e9fda5dcdd8 service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Received unexpected event network-vif-plugged-42ea5eee-6339-4252-a0d0-9ff18acf804a for instance with vm_state building and task_state spawning. [ 1367.451010] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Successfully updated port: 42ea5eee-6339-4252-a0d0-9ff18acf804a {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1367.462454] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "refresh_cache-39ee164e-5c7c-44cf-9767-cef1b8560bfb" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.462607] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired lock "refresh_cache-39ee164e-5c7c-44cf-9767-cef1b8560bfb" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.462749] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1367.522562] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1367.701821] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Updating instance_info_cache with network_info: [{"id": "42ea5eee-6339-4252-a0d0-9ff18acf804a", "address": "fa:16:3e:ef:10:6e", "network": {"id": "b0ef06f2-c074-4d90-b63b-6f6f3c5bd860", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1122167395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b751370bba1b4705957897afbd64490c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42ea5eee-63", "ovs_interfaceid": "42ea5eee-6339-4252-a0d0-9ff18acf804a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.715852] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Releasing lock "refresh_cache-39ee164e-5c7c-44cf-9767-cef1b8560bfb" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.716402] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Instance network_info: |[{"id": "42ea5eee-6339-4252-a0d0-9ff18acf804a", "address": "fa:16:3e:ef:10:6e", "network": {"id": "b0ef06f2-c074-4d90-b63b-6f6f3c5bd860", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1122167395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b751370bba1b4705957897afbd64490c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42ea5eee-63", "ovs_interfaceid": "42ea5eee-6339-4252-a0d0-9ff18acf804a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1367.717119] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:10:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42ea5eee-6339-4252-a0d0-9ff18acf804a', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1367.724566] env[69027]: DEBUG oslo.service.loopingcall [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1367.725041] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1367.725392] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e38451d-ef5b-427c-9167-6fcd4f0bf052 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.746284] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1367.746284] env[69027]: value = "task-3395201" [ 1367.746284] env[69027]: _type = "Task" [ 1367.746284] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.754133] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395201, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.257205] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395201, 'name': CreateVM_Task, 'duration_secs': 0.354281} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.257361] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1368.257987] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.258169] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.258487] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1368.258722] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1fb5afa-6361-40a3-b52a-d0d5e42994ff {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.263163] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1368.263163] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5274eccd-3989-3b52-bbd1-cfdb5f0eb2ea" [ 1368.263163] env[69027]: _type = "Task" [ 1368.263163] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.270632] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5274eccd-3989-3b52-bbd1-cfdb5f0eb2ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.773743] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.774047] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1368.774223] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.373208] env[69027]: DEBUG nova.compute.manager [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Received event network-changed-42ea5eee-6339-4252-a0d0-9ff18acf804a {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1369.373377] env[69027]: DEBUG nova.compute.manager [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Refreshing instance network info cache due to event network-changed-42ea5eee-6339-4252-a0d0-9ff18acf804a. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1369.373593] env[69027]: DEBUG oslo_concurrency.lockutils [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] Acquiring lock "refresh_cache-39ee164e-5c7c-44cf-9767-cef1b8560bfb" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.373738] env[69027]: DEBUG oslo_concurrency.lockutils [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] Acquired lock "refresh_cache-39ee164e-5c7c-44cf-9767-cef1b8560bfb" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.373901] env[69027]: DEBUG nova.network.neutron [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Refreshing network info cache for port 42ea5eee-6339-4252-a0d0-9ff18acf804a {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1369.864428] env[69027]: DEBUG nova.network.neutron [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Updated VIF entry in instance network info cache for port 42ea5eee-6339-4252-a0d0-9ff18acf804a. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1369.864765] env[69027]: DEBUG nova.network.neutron [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Updating instance_info_cache with network_info: [{"id": "42ea5eee-6339-4252-a0d0-9ff18acf804a", "address": "fa:16:3e:ef:10:6e", "network": {"id": "b0ef06f2-c074-4d90-b63b-6f6f3c5bd860", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1122167395-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b751370bba1b4705957897afbd64490c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42ea5eee-63", "ovs_interfaceid": "42ea5eee-6339-4252-a0d0-9ff18acf804a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.875639] env[69027]: DEBUG oslo_concurrency.lockutils [req-d4132299-c0b0-46ea-acec-51782f677156 req-24558200-5a20-4bcf-91b8-bed4be6694eb service nova] Releasing lock "refresh_cache-39ee164e-5c7c-44cf-9767-cef1b8560bfb" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.768815] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1377.771658] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1378.771623] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1378.783708] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.783967] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.784126] 
env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.784285] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1378.785809] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceedceb-7021-4b6d-abb4-3da266803291 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.794566] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b702e1-7ab8-4f88-91de-2b25f2a59c48 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.808842] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a30a22f-5e2e-47ef-923e-bdbd23c24878 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.815424] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deb1266-0996-449a-baf0-2f8132da1877 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.845950] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180938MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1378.846120] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.846327] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.921669] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.921849] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.921981] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.922111] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.922236] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.922353] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.922469] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.922581] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.922693] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.922798] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1378.937584] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.948940] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3054e7f1-4a02-47ca-91fd-4d8669004e8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.960646] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c6a011b8-3c47-4e37-a9f1-e36a546048ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.970663] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bfe81ca7-70dc-4e48-9f8b-afa901baec0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.980844] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.991384] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fc2ddcd1-d7cf-45d3-903f-247b00f48f2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1378.991384] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1378.991545] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1379.188095] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfb2e4f-6256-42c6-ade9-c2d2cab44b20 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.195733] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816d1423-6d37-48c2-b7ad-2444521e5f75 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.228038] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908a9cca-a9ba-4d21-bf78-bc7158074b0c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.235917] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c3c9b-f207-4355-8500-0da84075ee31 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.249065] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.257623] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1379.271594] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1379.271783] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.425s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.272685] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1380.273034] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1380.273034] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1380.293675] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.293779] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294194] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294194] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294194] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294348] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294392] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294479] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294591] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.294701] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1380.295348] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1381.771445] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.767066] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.787991] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.771677] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.772862] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.772862] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1385.772899] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1386.369907] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.370189] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.761555] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.761814] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.509109] env[69027]: WARNING oslo_vmware.rw_handles [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles raise 
RemoteDisconnected("Remote end closed connection without" [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1414.509109] env[69027]: ERROR oslo_vmware.rw_handles [ 1414.509788] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1414.512108] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1414.512108] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Copying Virtual Disk [datastore2] vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/c7ceb0d7-496c-4747-8e97-fa5a41aa8c37/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1414.512299] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21662c28-d89d-44e7-a161-f6137cbc1bde {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.520552] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Waiting for the task: (returnval){ [ 1414.520552] env[69027]: value = "task-3395202" [ 1414.520552] env[69027]: _type = "Task" [ 1414.520552] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.529728] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Task: {'id': task-3395202, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.031211] env[69027]: DEBUG oslo_vmware.exceptions [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1415.031509] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.032089] env[69027]: ERROR nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1415.032089] env[69027]: Faults: ['InvalidArgument'] [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Traceback (most recent call last): [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] yield resources [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self.driver.spawn(context, instance, image_meta, [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self._fetch_image_if_missing(context, vi) [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] image_cache(vi, tmp_image_ds_loc) [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] vm_util.copy_virtual_disk( [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] session._wait_for_task(vmdk_copy_task) [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] return self.wait_for_task(task_ref) [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] return evt.wait() [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] result = hub.switch() [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] return self.greenlet.switch() [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self.f(*self.args, **self.kw) [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] raise exceptions.translate_fault(task_info.error) [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Faults: ['InvalidArgument'] [ 1415.032089] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] [ 1415.032959] env[69027]: INFO nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Terminating instance [ 1415.034038] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.034225] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.034467] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9cdf17bb-7a25-4e71-935d-24108ac669fd 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.036857] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1415.037069] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1415.038039] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d280bf89-788f-4bbf-a513-467bc52110a8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.045318] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1415.045575] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17b23631-e5b0-4b97-aea2-a273b326913e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.047947] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.048159] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1415.049121] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4f5cc70-bec3-4da1-8f70-c66a983d0efa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.054015] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Waiting for the task: (returnval){ [ 1415.054015] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52874c0d-b37d-20a7-aa0f-f730a8360dda" [ 1415.054015] env[69027]: _type = "Task" [ 1415.054015] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.065803] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52874c0d-b37d-20a7-aa0f-f730a8360dda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.105958] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1415.106150] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1415.106336] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Deleting the datastore file [datastore2] 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1415.106595] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29020e48-21de-47c0-ae4f-343bf764f95a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.113609] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Waiting for the task: (returnval){ [ 1415.113609] env[69027]: value = "task-3395204" [ 1415.113609] env[69027]: _type = "Task" [ 1415.113609] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.121175] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Task: {'id': task-3395204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.563844] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1415.564229] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Creating directory with path [datastore2] vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.564354] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de91f99b-467c-4390-9d98-b5a6304b6919 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.575282] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Created directory with path [datastore2] vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.575542] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Fetch image to [datastore2] vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1415.575747] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1415.576385] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d398e8d-e7f9-4bda-9fb2-fb95ac871e85 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.582668] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5ea04c-13b3-4ee3-b5a7-f6c636aff98c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.591415] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7800e51b-7539-4355-8be3-99b92180c1d5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.623454] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d42f5b52-653c-423f-8584-88a9f337b177 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.633024] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ab2be0a6-7b91-435c-9756-8cfddaf6ff93 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.633972] env[69027]: DEBUG oslo_vmware.api [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Task: {'id': task-3395204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073803} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.634237] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1415.634415] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1415.634581] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1415.634752] env[69027]: INFO nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Took 0.60 seconds to destroy the instance on the hypervisor. 
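The copy, delete, and search operations recorded above all follow the same pattern: nova submits a vSphere task (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, SearchDatastore_Task) and then blocks in oslo.vmware's wait_for_task, which keeps polling the task and logging the "progress is N%" lines until the task either completes or its error is translated into a fault, such as the InvalidArgument VimFaultException seen earlier. A minimal sketch of that polling loop follows; get_task_info and TaskFault are hypothetical stand-ins for the real vSphere property reads and fault translation, so only the control flow mirrors the log.

# Minimal sketch of the task-polling pattern visible in the records above.
# get_task_info() and TaskFault are stand-ins for the real vSphere calls
# that oslo.vmware makes internally; only the control flow is illustrated.
import time


class TaskFault(Exception):
    """Raised when the remote task reports an error (cf. VimFaultException)."""


def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a task reference until it succeeds or errors, like the
    'Task: {...} progress is 0%' lines in the log."""
    while True:
        info = get_task_info(task_ref)          # hypothetical accessor
        if info["state"] == "running":
            print(f"Task {task_ref}: progress is {info['progress']}%")
            time.sleep(poll_interval)
        elif info["state"] == "success":
            return info.get("result")
        else:  # 'error' -> translate the fault, as oslo.vmware does
            raise TaskFault(info.get("error", "unknown fault"))


if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    states = iter([
        {"state": "running", "progress": 0},
        {"state": "running", "progress": 50},
        {"state": "success", "result": "done"},
    ])
    print(wait_for_task("task-3395202", lambda ref: next(states), 0.01))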
[ 1415.636878] env[69027]: DEBUG nova.compute.claims [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1415.637060] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.637277] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.655274] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1415.707332] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1415.766826] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1415.767016] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1415.928010] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e774e088-ee86-4cdc-8756-43de07cb37a3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.935474] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced397ae-5c41-42be-87b5-bfc7785e454a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.964494] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284f323a-d8f1-4753-ba8b-6c6796e54e2b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.971639] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3279761-fb4b-48d7-8be3-e548c503b39a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.984294] env[69027]: DEBUG nova.compute.provider_tree [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1415.992390] env[69027]: DEBUG nova.scheduler.client.report [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1416.006007] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.369s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.006537] env[69027]: ERROR nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1416.006537] env[69027]: Faults: ['InvalidArgument'] [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Traceback (most recent call last): [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1416.006537] env[69027]: ERROR 
nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self.driver.spawn(context, instance, image_meta, [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self._fetch_image_if_missing(context, vi) [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] image_cache(vi, tmp_image_ds_loc) [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] vm_util.copy_virtual_disk( [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] session._wait_for_task(vmdk_copy_task) [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] return self.wait_for_task(task_ref) [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] return evt.wait() [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] result = hub.switch() [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] return self.greenlet.switch() [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] self.f(*self.args, **self.kw) [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] raise exceptions.translate_fault(task_info.error) [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Faults: ['InvalidArgument'] [ 1416.006537] env[69027]: ERROR nova.compute.manager [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] [ 1416.007299] env[69027]: DEBUG nova.compute.utils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1416.008931] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Build of instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 was re-scheduled: A specified parameter was not correct: fileType [ 1416.008931] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1416.009320] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1416.009495] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1416.009663] env[69027]: DEBUG nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1416.009835] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1416.440311] env[69027]: DEBUG nova.network.neutron [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.454804] env[69027]: INFO nova.compute.manager [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Took 0.44 seconds to deallocate network for instance. [ 1416.571412] env[69027]: INFO nova.scheduler.client.report [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Deleted allocations for instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 [ 1416.598044] env[69027]: DEBUG oslo_concurrency.lockutils [None req-394fd883-8495-4bc1-9c64-bbbeb50eaf6a tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.876s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.598510] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.620s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.598803] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Acquiring lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.599071] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.599295] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.601270] env[69027]: INFO nova.compute.manager [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Terminating instance [ 1416.602969] env[69027]: DEBUG nova.compute.manager [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1416.603217] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1416.603725] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb4c7a32-1cb5-40b1-b0b4-471ba3e45155 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.614055] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0522c3e-0911-4ccc-a48c-1e526ab089c3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.623702] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1416.646077] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1757d80a-dc5f-4b8b-8e86-3562b36e1b21 could not be found. [ 1416.646077] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1416.646077] env[69027]: INFO nova.compute.manager [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Took 0.04 seconds to destroy the instance on the hypervisor. 
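The recurring triplets of 'Acquiring lock "compute_resources"', 'Lock ... acquired ... waited', and 'Lock ... released ... held' around instance_claim and abort_instance_claim are emitted by oslo.concurrency's lockutils, which serializes every resource-tracker mutation on a single named lock. The sketch below shows the underlying decorator pattern in isolation; Nova wraps it in its own helper, so treat this as an illustration of the mechanism rather than the tracker's actual code.

# Sketch of the locking pattern behind the compute_resources DEBUG lines,
# using oslo.concurrency directly.
from oslo_concurrency import lockutils


class Tracker:
    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(self, instance_uuid):
        # Everything in here runs with the named lock held, which is what
        # produces the acquired/held/released DEBUG records in the log.
        print(f"aborting claim for {instance_uuid}")


if __name__ == "__main__":
    Tracker().abort_instance_claim("1757d80a-dc5f-4b8b-8e86-3562b36e1b21")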
[ 1416.646077] env[69027]: DEBUG oslo.service.loopingcall [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1416.646077] env[69027]: DEBUG nova.compute.manager [-] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1416.646077] env[69027]: DEBUG nova.network.neutron [-] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1416.675957] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.675957] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.677211] env[69027]: INFO nova.compute.claims [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1416.680178] env[69027]: DEBUG nova.network.neutron [-] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.689803] env[69027]: INFO nova.compute.manager [-] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] Took 0.05 seconds to deallocate network for instance. [ 1416.788609] env[69027]: DEBUG oslo_concurrency.lockutils [None req-8393d1b1-56d1-4bcd-a19c-38fe1d2d1704 tempest-ServerMetadataTestJSON-114751732 tempest-ServerMetadataTestJSON-114751732-project-member] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.789482] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 274.715s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.789669] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1757d80a-dc5f-4b8b-8e86-3562b36e1b21] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1416.789852] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "1757d80a-dc5f-4b8b-8e86-3562b36e1b21" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.923747] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28fd181-469f-47e2-a631-5b9d92cf6152 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.931350] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15582cb3-0b3b-4107-908b-c78356d04496 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.962608] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cdf61e0-bad0-4208-844c-6c9173524262 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.969541] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab72b14-d6dc-467c-8c00-4d2776825e2c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.982070] env[69027]: DEBUG nova.compute.provider_tree [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.990394] env[69027]: DEBUG nova.scheduler.client.report [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1417.006188] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.331s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.006692] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1417.038101] env[69027]: DEBUG nova.compute.utils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1417.039463] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1417.039639] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1417.052673] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1417.099283] env[69027]: DEBUG nova.policy [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79047db126994a79b8ffe3af1c7c43c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '459c3e2c96a047ddac468e91fbe438b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1417.115285] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1417.142152] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1417.142401] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1417.142559] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1417.142733] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1417.142880] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1417.143037] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1417.143244] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1417.143403] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1417.143579] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Got 1 
possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1417.143797] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1417.143954] env[69027]: DEBUG nova.virt.hardware [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1417.144832] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c0e2b1-b96d-4301-bcb8-e0338497e373 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.152695] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e141522-3227-433b-9ed9-2f4245efc79b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.508350] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Successfully created port: 26d7ccf0-6cd2-4441-9414-aa320b42d81d {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1418.162012] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Successfully updated port: 26d7ccf0-6cd2-4441-9414-aa320b42d81d {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1418.178425] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "refresh_cache-03d9d361-da15-4fb7-acfb-049098183bc3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.178658] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired lock "refresh_cache-03d9d361-da15-4fb7-acfb-049098183bc3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.178744] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1418.231763] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1418.405606] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Updating instance_info_cache with network_info: [{"id": "26d7ccf0-6cd2-4441-9414-aa320b42d81d", "address": "fa:16:3e:ee:ed:27", "network": {"id": "b30e6669-7c3a-4191-8eeb-47271f0397fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2133150353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "459c3e2c96a047ddac468e91fbe438b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26d7ccf0-6c", "ovs_interfaceid": "26d7ccf0-6cd2-4441-9414-aa320b42d81d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.416829] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Releasing lock "refresh_cache-03d9d361-da15-4fb7-acfb-049098183bc3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.417142] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Instance network_info: |[{"id": "26d7ccf0-6cd2-4441-9414-aa320b42d81d", "address": "fa:16:3e:ee:ed:27", "network": {"id": "b30e6669-7c3a-4191-8eeb-47271f0397fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2133150353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "459c3e2c96a047ddac468e91fbe438b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26d7ccf0-6c", "ovs_interfaceid": "26d7ccf0-6cd2-4441-9414-aa320b42d81d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1418.417559] env[69027]: DEBUG 
nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:ed:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '664c466b-9417-49d7-83cc-364d964c403a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26d7ccf0-6cd2-4441-9414-aa320b42d81d', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1418.425041] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating folder: Project (459c3e2c96a047ddac468e91fbe438b6). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1418.425535] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53043ed1-cd94-4cab-aae9-bfbaaf721909 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.436562] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Created folder: Project (459c3e2c96a047ddac468e91fbe438b6) in parent group-v677321. [ 1418.436756] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating folder: Instances. Parent ref: group-v677399. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1418.437469] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6762226-0b47-4f33-9fa1-9c3110747728 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.445319] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Created folder: Instances in parent group-v677399. [ 1418.445547] env[69027]: DEBUG oslo.service.loopingcall [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.445743] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1418.445954] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfdeb8e3-2f18-4af8-85b6-f3c831b075cf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.466247] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1418.466247] env[69027]: value = "task-3395207" [ 1418.466247] env[69027]: _type = "Task" [ 1418.466247] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.473832] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395207, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.625195] env[69027]: DEBUG nova.compute.manager [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Received event network-vif-plugged-26d7ccf0-6cd2-4441-9414-aa320b42d81d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1418.625432] env[69027]: DEBUG oslo_concurrency.lockutils [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] Acquiring lock "03d9d361-da15-4fb7-acfb-049098183bc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.625645] env[69027]: DEBUG oslo_concurrency.lockutils [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] Lock "03d9d361-da15-4fb7-acfb-049098183bc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.625844] env[69027]: DEBUG oslo_concurrency.lockutils [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] Lock "03d9d361-da15-4fb7-acfb-049098183bc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.626023] env[69027]: DEBUG nova.compute.manager [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] No waiting events found dispatching network-vif-plugged-26d7ccf0-6cd2-4441-9414-aa320b42d81d {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1418.626194] env[69027]: WARNING nova.compute.manager [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Received unexpected event network-vif-plugged-26d7ccf0-6cd2-4441-9414-aa320b42d81d for instance with vm_state building and task_state spawning. [ 1418.626355] env[69027]: DEBUG nova.compute.manager [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Received event network-changed-26d7ccf0-6cd2-4441-9414-aa320b42d81d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1418.626510] env[69027]: DEBUG nova.compute.manager [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Refreshing instance network info cache due to event network-changed-26d7ccf0-6cd2-4441-9414-aa320b42d81d. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1418.626780] env[69027]: DEBUG oslo_concurrency.lockutils [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] Acquiring lock "refresh_cache-03d9d361-da15-4fb7-acfb-049098183bc3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.626869] env[69027]: DEBUG oslo_concurrency.lockutils [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] Acquired lock "refresh_cache-03d9d361-da15-4fb7-acfb-049098183bc3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.627025] env[69027]: DEBUG nova.network.neutron [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Refreshing network info cache for port 26d7ccf0-6cd2-4441-9414-aa320b42d81d {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1418.946956] env[69027]: DEBUG nova.network.neutron [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Updated VIF entry in instance network info cache for port 26d7ccf0-6cd2-4441-9414-aa320b42d81d. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1418.947340] env[69027]: DEBUG nova.network.neutron [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Updating instance_info_cache with network_info: [{"id": "26d7ccf0-6cd2-4441-9414-aa320b42d81d", "address": "fa:16:3e:ee:ed:27", "network": {"id": "b30e6669-7c3a-4191-8eeb-47271f0397fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2133150353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "459c3e2c96a047ddac468e91fbe438b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26d7ccf0-6c", "ovs_interfaceid": "26d7ccf0-6cd2-4441-9414-aa320b42d81d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.957652] env[69027]: DEBUG oslo_concurrency.lockutils [req-718fc4fd-12dc-46a4-b690-c9b1fb77b166 req-f4e0dc7a-7b4e-43eb-9adf-dd08e6f1a6ea service nova] Releasing lock "refresh_cache-03d9d361-da15-4fb7-acfb-049098183bc3" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.975770] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395207, 'name': CreateVM_Task, 'duration_secs': 0.309357} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.975935] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1418.976574] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.976749] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.977070] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1418.977314] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4a918b9-cde8-456c-ad9a-e7cc3b0b0850 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.981533] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 1418.981533] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52c36722-9a1d-a4d3-cfd7-e3953c3e2d81" [ 1418.981533] env[69027]: _type = "Task" [ 1418.981533] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.988514] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52c36722-9a1d-a4d3-cfd7-e3953c3e2d81, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.492343] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.492604] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1419.492831] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.712506] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.143127] env[69027]: DEBUG oslo_concurrency.lockutils [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "03d9d361-da15-4fb7-acfb-049098183bc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.767565] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.771185] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.782274] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.782489] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.782654] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1438.782806] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1438.783924] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b65fa56-06b8-43a1-a79e-a2c11a55f43d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.792741] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d34021-04b3-4575-8fb0-23ef8c837aaf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.806457] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2d7356-879a-478b-a573-4e25aa560d94 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.812551] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a26f2e-4e56-45b9-bf56-039bad825012 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.840452] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180987MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1438.840593] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.840782] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.011729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.011729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.011729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.011729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.011729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.011729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.011729] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.012488] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.012864] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.013405] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1439.031026] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3054e7f1-4a02-47ca-91fd-4d8669004e8e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1439.043696] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c6a011b8-3c47-4e37-a9f1-e36a546048ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1439.056207] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bfe81ca7-70dc-4e48-9f8b-afa901baec0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1439.070593] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1439.083026] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fc2ddcd1-d7cf-45d3-903f-247b00f48f2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1439.092311] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1439.103347] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1439.103936] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1439.106088] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1439.293420] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca16800a-088f-4756-aa24-9bd72f6f1b8b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.300960] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2642775-4d6c-407c-b0c9-9827ec893bb6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.330016] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a674678-ee78-4766-8177-e446e18a3c75 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.336768] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bff7a52-62d2-4161-aba6-9d95fa0b99ec {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.349325] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.357377] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1439.370813] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1439.371020] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.530s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.371258] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.371535] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1440.371574] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1440.394244] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.394439] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.394582] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.394713] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.394839] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.394962] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.395241] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.395420] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.395595] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.395726] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1440.395845] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1440.396501] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1441.770805] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.771305] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.772251] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.772543] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.772656] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1445.772366] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.772665] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.772751] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances with incomplete migration {{(pid=69027) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1449.432106] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "1d3442ae-f46f-433d-bccb-f323463e3a21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.432423] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.771562] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.781399] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1454.781653] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1454.795174] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] There are 1 instances to clean {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1454.795439] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 54340994-037e-4255-b32b-18d8784733c3] Instance has had 0 of 5 cleanup attempts {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11232}} [ 1457.999403] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.999697] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.272371] env[69027]: WARNING oslo_vmware.rw_handles [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1464.272371] env[69027]: ERROR oslo_vmware.rw_handles [ 1464.272371] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1464.272371] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1464.272371] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Copying Virtual Disk [datastore2] vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] 
vmware_temp/7ed261df-1df0-4c28-a0a8-124592601725/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1464.273102] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f766031a-382a-4d3f-b1b5-5adc97bccd78 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.280911] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Waiting for the task: (returnval){ [ 1464.280911] env[69027]: value = "task-3395208" [ 1464.280911] env[69027]: _type = "Task" [ 1464.280911] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.289259] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Task: {'id': task-3395208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.791721] env[69027]: DEBUG oslo_vmware.exceptions [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1464.792031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.792580] env[69027]: ERROR nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1464.792580] env[69027]: Faults: ['InvalidArgument'] [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Traceback (most recent call last): [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] yield resources [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self.driver.spawn(context, instance, image_meta, [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1464.792580] env[69027]: ERROR nova.compute.manager 
[instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self._fetch_image_if_missing(context, vi) [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] image_cache(vi, tmp_image_ds_loc) [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] vm_util.copy_virtual_disk( [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] session._wait_for_task(vmdk_copy_task) [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] return self.wait_for_task(task_ref) [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] return evt.wait() [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] result = hub.switch() [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] return self.greenlet.switch() [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self.f(*self.args, **self.kw) [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] raise exceptions.translate_fault(task_info.error) [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Faults: ['InvalidArgument'] [ 1464.792580] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] [ 1464.793406] env[69027]: INFO nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Terminating instance [ 1464.794547] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.794795] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1464.796098] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1464.796098] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1464.799664] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c9ee58e-7040-408c-83d7-36db8b73c303 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.799664] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82823a2-e1ef-48e1-919e-93ee80b38707 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.805928] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1464.806157] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-227ac6c5-0df9-4715-8a9f-d778fa61b1ea {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.808326] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1464.808492] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1464.809446] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abea47d3-42ba-42fd-b812-77f17883b687 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.814750] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1464.814750] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5202c646-718d-6490-b54a-be4041aedfe0" [ 1464.814750] env[69027]: _type = "Task" [ 1464.814750] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.821021] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5202c646-718d-6490-b54a-be4041aedfe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.875477] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1464.875706] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1464.875889] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Deleting the datastore file [datastore2] c099867e-d9e3-43a4-b2cb-568270d4aa6b {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1464.876209] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a7b0303-d05e-441d-891b-4d70a6fd7902 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.882610] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Waiting for the task: (returnval){ [ 1464.882610] env[69027]: value = "task-3395210" [ 1464.882610] env[69027]: _type = "Task" [ 1464.882610] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.890251] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Task: {'id': task-3395210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.325734] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1465.325734] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating directory with path [datastore2] vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1465.325734] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52a84d1d-6cf8-4894-9482-bc9c6225ebe9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.339077] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Created directory with path [datastore2] vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1465.339077] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Fetch image to [datastore2] vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1465.339077] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1465.339347] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28000013-7a8d-404d-a58c-6327946d607c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.348323] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad933be-c980-481d-99a3-d5a0fa06b8a4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.359810] env[69027]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea620e0-a52a-4bb1-98af-b342bedf5bcd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.394066] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fbed5a-311c-412b-92e6-c7e0db07b5ed {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.402707] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-93d47895-dab3-495f-8599-2cba60c2ad07 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.404449] env[69027]: DEBUG oslo_vmware.api [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Task: {'id': task-3395210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082927} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.404684] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1465.404861] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1465.405044] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1465.405223] env[69027]: INFO nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Took 0.61 seconds to destroy the instance on the hypervisor. 
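The "Waiting for the task ... to complete" and "progress is 0%" entries above come from a poll-until-done loop around vCenter task objects. Below is a minimal, self-contained sketch of that pattern; the get_task_info callable, the state names and the helper itself are illustrative stand-ins, not the oslo.vmware API, which wraps the same idea behind VMwareAPISession.wait_for_task.

    import time

    class TaskTimeout(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        # get_task_info: any callable returning an object with .state in
        # ('queued', 'running', 'success', 'error'), plus .progress and .error,
        # mirroring the fields reported in the Task: {...} entries above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # in the log this surfaces as oslo_vmware.exceptions.VimFaultException
                raise RuntimeError(info.error)
            # 'queued' / 'running': report progress and poll again
            print('progress is %s%%' % info.progress)
            time.sleep(poll_interval)
        raise TaskTimeout('task did not complete within %ss' % timeout)

In the log, task-3395210 (DeleteDatastoreFile_Task) reports progress 0% on its first poll and is seen completed on a later one, which is exactly the success branch of such a loop.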
[ 1465.407354] env[69027]: DEBUG nova.compute.claims [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1465.407534] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.407744] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.428025] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1465.492366] env[69027]: DEBUG oslo_vmware.rw_handles [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1465.561547] env[69027]: DEBUG oslo_vmware.rw_handles [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1465.561749] env[69027]: DEBUG oslo_vmware.rw_handles [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1465.734296] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1f488f-1716-4d0e-8a6d-edd304c4ba49 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.742055] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f044176d-b82b-4270-b9f8-91831b5cdb56 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.773362] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4c072b-90fd-42f6-87d3-c630cfd7338f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.780733] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceac8a1f-c6ca-42b6-9a4a-3e3512a755ab {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.793648] env[69027]: DEBUG nova.compute.provider_tree [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.802707] env[69027]: DEBUG nova.scheduler.client.report [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1465.817566] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.410s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.818129] env[69027]: ERROR nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1465.818129] env[69027]: Faults: ['InvalidArgument'] [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Traceback (most recent call last): [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1465.818129] env[69027]: 
ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self.driver.spawn(context, instance, image_meta, [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self._fetch_image_if_missing(context, vi) [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] image_cache(vi, tmp_image_ds_loc) [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] vm_util.copy_virtual_disk( [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] session._wait_for_task(vmdk_copy_task) [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] return self.wait_for_task(task_ref) [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] return evt.wait() [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] result = hub.switch() [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] return self.greenlet.switch() [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] self.f(*self.args, **self.kw) [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] raise exceptions.translate_fault(task_info.error) [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Faults: ['InvalidArgument'] [ 1465.818129] env[69027]: ERROR nova.compute.manager [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] [ 1465.818974] env[69027]: DEBUG nova.compute.utils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1465.821567] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Build of instance c099867e-d9e3-43a4-b2cb-568270d4aa6b was re-scheduled: A specified parameter was not correct: fileType [ 1465.821567] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1465.821937] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1465.822130] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1465.822302] env[69027]: DEBUG nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1465.822467] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1466.124987] env[69027]: DEBUG nova.network.neutron [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.161294] env[69027]: INFO nova.compute.manager [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Took 0.34 seconds to deallocate network for instance. [ 1466.268043] env[69027]: INFO nova.scheduler.client.report [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Deleted allocations for instance c099867e-d9e3-43a4-b2cb-568270d4aa6b [ 1466.289361] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5b41bf3-375d-4c58-b6e3-131cc1701834 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 637.435s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.290473] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 440.755s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.290923] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Acquiring lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.291162] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.291352] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.293366] env[69027]: INFO nova.compute.manager [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Terminating instance [ 1466.295428] env[69027]: DEBUG nova.compute.manager [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1466.295626] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1466.296098] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55010ff3-6e4f-491d-91a4-cca65746689b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.306304] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81941e2-df1e-48c2-b905-c7dc1ca114f4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.316577] env[69027]: DEBUG nova.compute.manager [None req-418a2ef6-73d0-494d-8722-304c1351ecf2 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 3054e7f1-4a02-47ca-91fd-4d8669004e8e] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1466.338378] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c099867e-d9e3-43a4-b2cb-568270d4aa6b could not be found. [ 1466.338639] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1466.338765] env[69027]: INFO nova.compute.manager [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Took 0.04 seconds to destroy the instance on the hypervisor. 
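The recurring "Acquiring lock ... by ...", "acquired ... :: waited", and "released ... :: held" triplets throughout this section are emitted by oslo.concurrency's lock wrappers. A minimal usage sketch follows, assuming only that oslo.concurrency is installed; the lock names are copied from the log, but Nova reaches this code through its own helper wrappers rather than calling lockutils directly like this.

    from oslo_concurrency import lockutils

    # Decorator form: serialize all builds for one instance UUID, as in the
    # "_locked_do_build_and_run_instance" entries above (UUID taken from the log).
    @lockutils.synchronized('c099867e-d9e3-43a4-b2cb-568270d4aa6b')
    def _locked_do_build_and_run_instance():
        pass  # runs with the per-instance lock held

    # Context-manager form; the "Acquired lock"/"Releasing lock" pairs around the
    # [datastore2] devstack-image-cache_base vmdk use this style of lock.
    def abort_instance_claim():
        with lockutils.lock('compute_resources'):
            pass  # work done while the shared resource-tracker lock is held

The waited/held durations printed in the log are simply measured around the wrapped call, which is why a long-running build shows up later as a lock "held 637.435s".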
[ 1466.339073] env[69027]: DEBUG oslo.service.loopingcall [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.339309] env[69027]: DEBUG nova.compute.manager [-] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1466.339406] env[69027]: DEBUG nova.network.neutron [-] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1466.344880] env[69027]: DEBUG nova.compute.manager [None req-418a2ef6-73d0-494d-8722-304c1351ecf2 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 3054e7f1-4a02-47ca-91fd-4d8669004e8e] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1466.366375] env[69027]: DEBUG oslo_concurrency.lockutils [None req-418a2ef6-73d0-494d-8722-304c1351ecf2 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "3054e7f1-4a02-47ca-91fd-4d8669004e8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.246s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.367861] env[69027]: DEBUG nova.network.neutron [-] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.376218] env[69027]: DEBUG nova.compute.manager [None req-99ab5b10-dbaf-4be1-8b5f-e21f9f027a30 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: c6a011b8-3c47-4e37-a9f1-e36a546048ca] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1466.378713] env[69027]: INFO nova.compute.manager [-] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] Took 0.04 seconds to deallocate network for instance. [ 1466.403287] env[69027]: DEBUG nova.compute.manager [None req-99ab5b10-dbaf-4be1-8b5f-e21f9f027a30 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: c6a011b8-3c47-4e37-a9f1-e36a546048ca] Instance disappeared before build. 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1466.427233] env[69027]: DEBUG oslo_concurrency.lockutils [None req-99ab5b10-dbaf-4be1-8b5f-e21f9f027a30 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "c6a011b8-3c47-4e37-a9f1-e36a546048ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.766s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.437917] env[69027]: DEBUG nova.compute.manager [None req-906ac4c6-1d8b-467c-bc05-d7dd024b8866 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] [instance: bfe81ca7-70dc-4e48-9f8b-afa901baec0a] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1466.463468] env[69027]: DEBUG nova.compute.manager [None req-906ac4c6-1d8b-467c-bc05-d7dd024b8866 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] [instance: bfe81ca7-70dc-4e48-9f8b-afa901baec0a] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1466.475279] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a46bc731-10af-44fe-a479-51e1a79e15f1 tempest-SecurityGroupsTestJSON-1456058094 tempest-SecurityGroupsTestJSON-1456058094-project-member] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.476133] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 324.401s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.476379] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c099867e-d9e3-43a4-b2cb-568270d4aa6b] During sync_power_state the instance has a pending task (deleting). Skip. 
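The "During sync_power_state the instance has a pending task (deleting). Skip." entry reflects a guard in the periodic power-state sync: when the database row shows another operation already in flight, the sync backs off instead of racing it. The sketch below is a deliberate simplification of that decision, not Nova's _sync_power_states code; the instance object and its save() method are assumed stand-ins.

    def query_driver_power_state_and_sync(instance, driver_power_state):
        # instance: any object exposing .task_state, .power_state and .save()
        if instance.task_state is not None:
            # e.g. task_state == 'deleting' above: another request owns the
            # instance right now, so skip this sync pass entirely
            return
        if instance.power_state != driver_power_state:
            # otherwise reconcile the stored power state with what the
            # hypervisor actually reports
            instance.power_state = driver_power_state
            instance.save()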
[ 1466.476510] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "c099867e-d9e3-43a4-b2cb-568270d4aa6b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.487482] env[69027]: DEBUG oslo_concurrency.lockutils [None req-906ac4c6-1d8b-467c-bc05-d7dd024b8866 tempest-AttachInterfacesTestJSON-407657669 tempest-AttachInterfacesTestJSON-407657669-project-member] Lock "bfe81ca7-70dc-4e48-9f8b-afa901baec0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.107s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.495924] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1466.548875] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.548875] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.550951] env[69027]: INFO nova.compute.claims [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1466.787559] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7315a871-3681-44a1-ae4b-b5917d9391f7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.797761] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70fa28d-348d-45d0-b67b-e90a876d6d0b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.826338] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e74b294-2f50-4b18-b552-3e52add9e67a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.833501] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b27031-a32d-4f91-8322-5019b68863f0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.846568] env[69027]: DEBUG 
nova.compute.provider_tree [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1466.855911] env[69027]: DEBUG nova.scheduler.client.report [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1466.871609] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.323s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.872114] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1466.906614] env[69027]: DEBUG nova.compute.utils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1466.908528] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1466.908734] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1466.917716] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1466.983473] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1466.999348] env[69027]: DEBUG nova.policy [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84cb84f21baf49b0b331f601a107c990', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '78a13bae334a4ef7959f0d408926ca33', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1467.009047] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1467.009280] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1467.009438] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1467.009620] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1467.009818] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1467.009975] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1467.010216] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1467.010379] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1467.010559] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1467.010730] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1467.011197] env[69027]: DEBUG nova.virt.hardware [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1467.012114] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dbf133-e8fc-4aa3-b54e-39b557039d7e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.020817] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1c03f0-6e04-41e1-89aa-c8670467aabc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.419275] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Successfully created port: 3f043f8c-afc2-4916-802d-51370e89b43d {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1468.317911] env[69027]: DEBUG nova.compute.manager [req-752a3f07-8986-4cf0-9157-91ccf7bec035 req-a90756d2-101e-4ba8-8eb9-3087c4ee3838 service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Received event network-vif-plugged-3f043f8c-afc2-4916-802d-51370e89b43d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1468.318154] env[69027]: DEBUG oslo_concurrency.lockutils 
[req-752a3f07-8986-4cf0-9157-91ccf7bec035 req-a90756d2-101e-4ba8-8eb9-3087c4ee3838 service nova] Acquiring lock "b930e792-b0a8-45e4-9330-befac22182b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.318322] env[69027]: DEBUG oslo_concurrency.lockutils [req-752a3f07-8986-4cf0-9157-91ccf7bec035 req-a90756d2-101e-4ba8-8eb9-3087c4ee3838 service nova] Lock "b930e792-b0a8-45e4-9330-befac22182b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.318521] env[69027]: DEBUG oslo_concurrency.lockutils [req-752a3f07-8986-4cf0-9157-91ccf7bec035 req-a90756d2-101e-4ba8-8eb9-3087c4ee3838 service nova] Lock "b930e792-b0a8-45e4-9330-befac22182b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.318699] env[69027]: DEBUG nova.compute.manager [req-752a3f07-8986-4cf0-9157-91ccf7bec035 req-a90756d2-101e-4ba8-8eb9-3087c4ee3838 service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] No waiting events found dispatching network-vif-plugged-3f043f8c-afc2-4916-802d-51370e89b43d {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1468.318868] env[69027]: WARNING nova.compute.manager [req-752a3f07-8986-4cf0-9157-91ccf7bec035 req-a90756d2-101e-4ba8-8eb9-3087c4ee3838 service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Received unexpected event network-vif-plugged-3f043f8c-afc2-4916-802d-51370e89b43d for instance with vm_state building and task_state spawning. 
[ 1468.406964] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Successfully updated port: 3f043f8c-afc2-4916-802d-51370e89b43d {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1468.418402] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "refresh_cache-b930e792-b0a8-45e4-9330-befac22182b7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.418593] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired lock "refresh_cache-b930e792-b0a8-45e4-9330-befac22182b7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.418754] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1468.466126] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1468.654161] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Updating instance_info_cache with network_info: [{"id": "3f043f8c-afc2-4916-802d-51370e89b43d", "address": "fa:16:3e:0a:9c:85", "network": {"id": "404b4355-dae6-4ce3-a75a-51ccc2099a23", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-164317621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78a13bae334a4ef7959f0d408926ca33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f043f8c-af", "ovs_interfaceid": "3f043f8c-afc2-4916-802d-51370e89b43d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.670251] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Releasing lock "refresh_cache-b930e792-b0a8-45e4-9330-befac22182b7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.670566] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Instance network_info: |[{"id": "3f043f8c-afc2-4916-802d-51370e89b43d", "address": "fa:16:3e:0a:9c:85", "network": {"id": "404b4355-dae6-4ce3-a75a-51ccc2099a23", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-164317621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78a13bae334a4ef7959f0d408926ca33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f043f8c-af", "ovs_interfaceid": "3f043f8c-afc2-4916-802d-51370e89b43d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1468.671008] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:9c:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24376631-ee89-4ff1-b8ac-f09911fc8329', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f043f8c-afc2-4916-802d-51370e89b43d', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1468.679612] env[69027]: DEBUG oslo.service.loopingcall [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.680181] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1468.680629] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ff380ea-9750-4041-a1b8-6d739e360571 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.702053] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1468.702053] env[69027]: value = "task-3395211" [ 1468.702053] env[69027]: _type = "Task" [ 1468.702053] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.709845] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395211, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.214435] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395211, 'name': CreateVM_Task, 'duration_secs': 0.280559} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.214600] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1469.215287] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.215450] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.215786] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1469.216051] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bf85aa9-b785-4593-abe3-08a3993571c5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.220921] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1469.220921] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]525b9d1b-cc55-7bca-05b3-01a735587dfb" [ 1469.220921] env[69027]: _type = "Task" [ 1469.220921] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.233621] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]525b9d1b-cc55-7bca-05b3-01a735587dfb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.732615] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.732924] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1469.733154] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.299443] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f3833cdd-e788-4314-95d5-5e5b269b16ca tempest-ServersNegativeTestJSON-332394447 tempest-ServersNegativeTestJSON-332394447-project-member] Acquiring lock "ed6243ed-cd06-46cd-b592-f05d5cd83139" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.299671] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f3833cdd-e788-4314-95d5-5e5b269b16ca tempest-ServersNegativeTestJSON-332394447 tempest-ServersNegativeTestJSON-332394447-project-member] Lock "ed6243ed-cd06-46cd-b592-f05d5cd83139" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.374682] env[69027]: DEBUG nova.compute.manager [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Received event network-changed-3f043f8c-afc2-4916-802d-51370e89b43d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1470.374823] env[69027]: DEBUG nova.compute.manager [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Refreshing instance network info cache due to event network-changed-3f043f8c-afc2-4916-802d-51370e89b43d. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1470.375084] env[69027]: DEBUG oslo_concurrency.lockutils [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] Acquiring lock "refresh_cache-b930e792-b0a8-45e4-9330-befac22182b7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.375392] env[69027]: DEBUG oslo_concurrency.lockutils [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] Acquired lock "refresh_cache-b930e792-b0a8-45e4-9330-befac22182b7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.375683] env[69027]: DEBUG nova.network.neutron [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Refreshing network info cache for port 3f043f8c-afc2-4916-802d-51370e89b43d {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1470.653224] env[69027]: DEBUG nova.network.neutron [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Updated VIF entry in instance network info cache for port 3f043f8c-afc2-4916-802d-51370e89b43d. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1470.653408] env[69027]: DEBUG nova.network.neutron [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Updating instance_info_cache with network_info: [{"id": "3f043f8c-afc2-4916-802d-51370e89b43d", "address": "fa:16:3e:0a:9c:85", "network": {"id": "404b4355-dae6-4ce3-a75a-51ccc2099a23", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-164317621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "78a13bae334a4ef7959f0d408926ca33", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24376631-ee89-4ff1-b8ac-f09911fc8329", "external-id": "nsx-vlan-transportzone-960", "segmentation_id": 960, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f043f8c-af", "ovs_interfaceid": "3f043f8c-afc2-4916-802d-51370e89b43d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.664732] env[69027]: DEBUG oslo_concurrency.lockutils [req-34642d25-b7ba-48d5-a056-2bf20c4ea956 req-1e4bd6f5-7f60-4aac-b2d7-d1807d8644ba service nova] Releasing lock "refresh_cache-b930e792-b0a8-45e4-9330-befac22182b7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.928366] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 
tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "b930e792-b0a8-45e4-9330-befac22182b7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.825136] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1498.837310] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.837546] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.837725] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.837880] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1498.838995] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580d2c4c-e681-43ed-bead-1a8c9e8f8bb7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.848774] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae9cfff-2019-45fc-8f90-ffe329ee321a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.862265] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1375a9-18d6-4d4b-840a-d484cc984898 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.868360] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28080c05-3690-4c7a-add3-e110a91ba337 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.896404] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180978MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1498.896573] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.896774] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.971165] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance bf4c80b4-bc0c-4198-9010-74fc50707745 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.971330] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.971458] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.971581] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.971700] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.971815] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.971930] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.972055] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.972171] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.972282] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1498.983112] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1498.993270] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1499.002572] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1499.013431] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1499.022666] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ed6243ed-cd06-46cd-b592-f05d5cd83139 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1499.022882] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1499.023035] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1499.039500] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Refreshing inventories for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1499.053854] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Updating ProviderTree inventory for provider 4923c91f-3b2b-4ad1-a821-36209acae639 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1499.054048] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1499.064158] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Refreshing aggregate associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, aggregates: None {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1499.081996] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Refreshing trait associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, traits: 
COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1499.250565] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a95661-0abd-42e7-82ad-3996c8d61c2e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.258300] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a82639f-93bf-40aa-8a67-a91caa57729a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.288497] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e048b78-dc9f-433c-9b50-6ef354ccc064 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.295270] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db3f546-5d94-43c2-abf2-7ab4fb0c51ea {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.307966] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.316191] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1499.329276] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1499.329546] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.433s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.271626] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.271893] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.272040] env[69027]: DEBUG 
nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1501.272171] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1501.292087] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.292243] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.292378] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.292506] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.292631] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.292754] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.292873] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.292991] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.293126] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.293243] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1501.293362] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1501.770934] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1501.771206] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1503.771630] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1504.772092] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1504.772092] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1505.771472] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.768330] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1506.790185] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.544323] env[69027]: WARNING oslo_vmware.rw_handles [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 
1514.544323] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1514.544323] env[69027]: ERROR oslo_vmware.rw_handles [ 1514.545009] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1514.546733] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1514.546992] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Copying Virtual Disk [datastore2] vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/9d481cf4-d269-4fef-8e74-4cf14f446cfc/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1514.547296] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8da38096-a6e1-4a87-ac46-7e8da041da37 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.556042] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1514.556042] env[69027]: value = "task-3395212" [ 1514.556042] env[69027]: _type = "Task" [ 1514.556042] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.565193] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': task-3395212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.067491] env[69027]: DEBUG oslo_vmware.exceptions [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1515.067772] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.068348] env[69027]: ERROR nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1515.068348] env[69027]: Faults: ['InvalidArgument'] [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Traceback (most recent call last): [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] yield resources [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self.driver.spawn(context, instance, image_meta, [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self._fetch_image_if_missing(context, vi) [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] image_cache(vi, tmp_image_ds_loc) [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] vm_util.copy_virtual_disk( [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] session._wait_for_task(vmdk_copy_task) [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] return self.wait_for_task(task_ref) [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] return evt.wait() [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] result = hub.switch() [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] return self.greenlet.switch() [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self.f(*self.args, **self.kw) [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] raise exceptions.translate_fault(task_info.error) [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Faults: ['InvalidArgument'] [ 1515.068348] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] [ 1515.069441] env[69027]: INFO nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Terminating instance [ 1515.070226] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.070433] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.070659] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcc70f68-23fd-4d75-9573-c6e6304a8454 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.072755] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1515.072950] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1515.073663] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b505324-432f-44f4-aa63-3a53d066bdce {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.080613] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1515.081502] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1716df68-4d07-4af6-a88e-3ce6c14b83fb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.082803] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.082974] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1515.083636] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-980e5eab-a399-460b-b2d2-d8aeb3a00e91 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.088329] env[69027]: DEBUG oslo_vmware.api [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for the task: (returnval){ [ 1515.088329] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52d26a10-2b5e-4966-7c45-38d78dcc1215" [ 1515.088329] env[69027]: _type = "Task" [ 1515.088329] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.095181] env[69027]: DEBUG oslo_vmware.api [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52d26a10-2b5e-4966-7c45-38d78dcc1215, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.145921] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1515.146157] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1515.146362] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Deleting the datastore file [datastore2] bf4c80b4-bc0c-4198-9010-74fc50707745 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1515.146677] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c05d2b29-cc58-4fff-9ce0-5ee21cc3f8a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.152218] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1515.152218] env[69027]: value = "task-3395214" [ 1515.152218] env[69027]: _type = "Task" [ 1515.152218] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.159535] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': task-3395214, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.598292] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1515.598587] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Creating directory with path [datastore2] vmware_temp/e1fd8892-d21a-4caf-8027-9462544d102f/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.598775] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-052badd1-e8b7-4418-9d8e-620ad6bd4949 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.610235] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Created directory with path [datastore2] vmware_temp/e1fd8892-d21a-4caf-8027-9462544d102f/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.610428] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Fetch image to [datastore2] vmware_temp/e1fd8892-d21a-4caf-8027-9462544d102f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1515.610611] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/e1fd8892-d21a-4caf-8027-9462544d102f/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1515.611349] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8553e80-2882-4a6b-a673-842a1d4d461d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.617797] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dce6d4a-9ed8-48e9-8bc6-f3a2a20a5045 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.626429] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe6d9391-85da-4847-8c65-d775d814d3d7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.658098] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9787bd-435f-43f3-b631-aa08ba3117bc {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.664478] env[69027]: DEBUG oslo_vmware.api [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': task-3395214, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080655} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.665860] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1515.666055] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1515.666237] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1515.666411] env[69027]: INFO nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1515.668210] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9f22aff4-18b2-4a6f-b538-202490d852fc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.670012] env[69027]: DEBUG nova.compute.claims [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1515.670202] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.670416] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.691839] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1515.893872] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.894684] env[69027]: ERROR nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
[ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = getattr(controller, method)(*args, **kwargs) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._get(image_id) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] resp, body = self.http_client.get(url, headers=header) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.request(url, 'GET', **kwargs) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._handle_response(resp) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exc.from_response(resp, resp.content) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During handling of the above exception, another exception occurred: [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] yield resources [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self.driver.spawn(context, instance, image_meta, [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._fetch_image_if_missing(context, vi) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image_fetch(context, vi, tmp_image_ds_loc) [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] images.fetch_image( [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1515.894684] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] metadata = IMAGE_API.get(context, image_ref) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return session.show(context, image_id, [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] _reraise_translated_image_exception(image_id) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise new_exc.with_traceback(exc_trace) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = getattr(controller, method)(*args, **kwargs) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._get(image_id) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] resp, body = self.http_client.get(url, headers=header) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.request(url, 'GET', **kwargs) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._handle_response(resp) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exc.from_response(resp, resp.content) [ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
[ 1515.896338] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1515.896338] env[69027]: INFO nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Terminating instance [ 1515.897135] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.897135] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.897376] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1515.897511] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1515.897775] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71d3e826-0145-424e-90e9-deb0ef36085c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.900473] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858ea85e-e04c-4f32-ae5c-26f29298ca4d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.903931] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2d4a9e-f372-4bb8-ba88-d75fe32fe09f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.911105] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1515.911319] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6612674-4dd5-413e-af1d-0fb463af59fd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.914890] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Created directory with path 
[datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.915085] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1515.916269] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-879d956e-6f04-4126-9fbe-06d654443421 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.920704] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac2b1ee-a122-4ee5-8056-4dc2d4048d17 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.924665] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 1515.924665] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52978f37-a201-6a43-8490-9c24496843b6" [ 1515.924665] env[69027]: _type = "Task" [ 1515.924665] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.955774] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a6e3d0-90ff-4cb3-9873-0503aacffa62 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.958311] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52978f37-a201-6a43-8490-9c24496843b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.962950] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c31cceb-c6d3-4e96-a9f2-4a29b2e75380 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.977692] env[69027]: DEBUG nova.compute.provider_tree [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1515.979890] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1515.980102] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1515.980283] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Deleting the datastore file [datastore2] 90a6375b-4834-406d-abd5-5cf47b7cfc12 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1515.980728] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7451300-da0e-4ec8-bc60-98a73246224e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.986387] env[69027]: DEBUG oslo_vmware.api [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for the task: (returnval){ [ 1515.986387] env[69027]: value = "task-3395216" [ 1515.986387] env[69027]: _type = "Task" [ 1515.986387] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.990599] env[69027]: DEBUG nova.scheduler.client.report [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1515.998494] env[69027]: DEBUG oslo_vmware.api [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': task-3395216, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.006496] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.336s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.007089] env[69027]: ERROR nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1516.007089] env[69027]: Faults: ['InvalidArgument'] [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Traceback (most recent call last): [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self.driver.spawn(context, instance, image_meta, [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self._fetch_image_if_missing(context, vi) [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] 
image_cache(vi, tmp_image_ds_loc) [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] vm_util.copy_virtual_disk( [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] session._wait_for_task(vmdk_copy_task) [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] return self.wait_for_task(task_ref) [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] return evt.wait() [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] result = hub.switch() [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] return self.greenlet.switch() [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] self.f(*self.args, **self.kw) [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] raise exceptions.translate_fault(task_info.error) [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Faults: ['InvalidArgument'] [ 1516.007089] env[69027]: ERROR nova.compute.manager [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] [ 1516.008130] env[69027]: DEBUG nova.compute.utils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1516.009550] env[69027]: DEBUG nova.compute.manager [None 
req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Build of instance bf4c80b4-bc0c-4198-9010-74fc50707745 was re-scheduled: A specified parameter was not correct: fileType [ 1516.009550] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1516.009966] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1516.010169] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1516.010362] env[69027]: DEBUG nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1516.010617] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1516.371951] env[69027]: DEBUG nova.network.neutron [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.388282] env[69027]: INFO nova.compute.manager [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Took 0.38 seconds to deallocate network for instance. 
[ 1516.435134] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1516.435414] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating directory with path [datastore2] vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1516.435680] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e190661-2475-41ac-ab76-234835b135e7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.448240] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Created directory with path [datastore2] vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1516.449064] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Fetch image to [datastore2] vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1516.449064] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1516.449505] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190dbcc7-c39a-4d28-9baa-2d759cba3a68 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.457543] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f2b1b9-302d-4256-a445-5d32d5d39a14 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.467497] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85cfdb5-ad62-4941-834a-8b69ef91e623 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.506681] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f41641d-b441-430c-856c-352aab1f0edd {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.517008] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-71f6b8ef-9b98-4064-86c2-7ffe6eec3401 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.519380] env[69027]: DEBUG oslo_vmware.api [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Task: {'id': task-3395216, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074828} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.519968] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1516.520183] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1516.520358] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1516.520530] env[69027]: INFO nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1516.522846] env[69027]: DEBUG nova.compute.claims [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1516.523062] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.523293] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.532347] env[69027]: INFO nova.scheduler.client.report [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Deleted allocations for instance bf4c80b4-bc0c-4198-9010-74fc50707745 [ 1516.546176] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1516.566198] env[69027]: DEBUG oslo_concurrency.lockutils [None req-762b5387-40bb-4a58-b5a3-9ea3c8639928 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.594s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.567693] env[69027]: DEBUG oslo_concurrency.lockutils [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.506s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.567957] env[69027]: DEBUG oslo_concurrency.lockutils [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "bf4c80b4-bc0c-4198-9010-74fc50707745-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.568519] env[69027]: DEBUG oslo_concurrency.lockutils [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock 
"bf4c80b4-bc0c-4198-9010-74fc50707745-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.568519] env[69027]: DEBUG oslo_concurrency.lockutils [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.570624] env[69027]: INFO nova.compute.manager [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Terminating instance [ 1516.572543] env[69027]: DEBUG nova.compute.manager [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1516.572750] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1516.573239] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f6b0caf-55da-4d49-bc1d-e0bbad37617b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.582847] env[69027]: DEBUG nova.compute.manager [None req-3f8ced9e-1cdd-487e-91f8-81d81e8e4d95 tempest-ServerRescueTestJSON-1099885945 tempest-ServerRescueTestJSON-1099885945-project-member] [instance: fc2ddcd1-d7cf-45d3-903f-247b00f48f2b] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1516.589198] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12951702-e43d-45f4-9505-0a608184c445 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.612282] env[69027]: DEBUG nova.compute.manager [None req-3f8ced9e-1cdd-487e-91f8-81d81e8e4d95 tempest-ServerRescueTestJSON-1099885945 tempest-ServerRescueTestJSON-1099885945-project-member] [instance: fc2ddcd1-d7cf-45d3-903f-247b00f48f2b] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1516.623581] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf4c80b4-bc0c-4198-9010-74fc50707745 could not be found. 
[ 1516.623810] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1516.623997] env[69027]: INFO nova.compute.manager [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1516.626523] env[69027]: DEBUG oslo.service.loopingcall [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.632205] env[69027]: DEBUG nova.compute.manager [-] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1516.632263] env[69027]: DEBUG nova.network.neutron [-] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1516.642707] env[69027]: DEBUG oslo_concurrency.lockutils [None req-3f8ced9e-1cdd-487e-91f8-81d81e8e4d95 tempest-ServerRescueTestJSON-1099885945 tempest-ServerRescueTestJSON-1099885945-project-member] Lock "fc2ddcd1-d7cf-45d3-903f-247b00f48f2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.722s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.654744] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1516.663568] env[69027]: DEBUG nova.network.neutron [-] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.672812] env[69027]: INFO nova.compute.manager [-] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] Took 0.04 seconds to deallocate network for instance. [ 1516.682135] env[69027]: DEBUG oslo_vmware.rw_handles [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1516.750021] env[69027]: DEBUG oslo_vmware.rw_handles [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1516.750200] env[69027]: DEBUG oslo_vmware.rw_handles [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1516.765731] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.805451] env[69027]: DEBUG oslo_concurrency.lockutils [None req-306e4d72-81cf-4647-a1b8-e301e858bfc9 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.238s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.806301] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 374.731s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.806490] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: bf4c80b4-bc0c-4198-9010-74fc50707745] During sync_power_state the instance has a pending task (deleting). Skip. 
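
For context on the rw_handles records above: oslo.vmware streams the image to the ESX host's /folder datastore endpoint, with dcPath and dsName carried as query parameters, then closes the write handle once the image iterator is exhausted. The sketch below is a rough, simplified illustration of that upload (it is not oslo.vmware's FileWriteHandle); the basic-auth tuple, timeout, and certificate handling are placeholders for this sketch.

import requests

def upload_to_datastore(host, ds_name, ds_path, local_path, auth, verify=True):
    # PUT the file to the host's /folder endpoint; dcPath/dsName select the
    # datacenter and datastore, mirroring the query string in the logged URL.
    url = f"https://{host}/folder/{ds_path}"
    params = {"dcPath": "ha-datacenter", "dsName": ds_name}
    with open(local_path, "rb") as src:
        resp = requests.put(url, params=params, data=src, auth=auth,
                            verify=verify, timeout=300)
    resp.raise_for_status()

# Hypothetical call shaped like the logged URL:
# upload_to_datastore("esx7c2n3.openstack.eu-de-1.cloud.sap", "datastore2",
#                     "vmware_temp/<dir>/<image-id>/tmp-sparse.vmdk",
#                     "tmp-sparse.vmdk", auth=("user", "secret"))
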
[ 1516.806693] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "bf4c80b4-bc0c-4198-9010-74fc50707745" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.872631] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb495a7-1eda-4f63-9110-70d2a8ecc21c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.880784] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3315028-79be-48b1-a5f3-5e5a225c4c06 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.913057] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ada61d-6b63-429f-808f-b43242dc09e7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.920490] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb65db66-da03-4fe4-8b42-2b083fbcf175 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.934190] env[69027]: DEBUG nova.compute.provider_tree [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1516.944111] env[69027]: DEBUG nova.scheduler.client.report [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1516.959864] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.436s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.960626] env[69027]: ERROR nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
[ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = getattr(controller, method)(*args, **kwargs) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._get(image_id) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] resp, body = self.http_client.get(url, headers=header) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.request(url, 'GET', **kwargs) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._handle_response(resp) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exc.from_response(resp, resp.content) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During handling of the above exception, another exception occurred: [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self.driver.spawn(context, instance, image_meta, [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._fetch_image_if_missing(context, vi) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image_fetch(context, vi, tmp_image_ds_loc) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] images.fetch_image( [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] metadata = IMAGE_API.get(context, image_ref) [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1516.960626] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return session.show(context, image_id, [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] _reraise_translated_image_exception(image_id) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise new_exc.with_traceback(exc_trace) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 
90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = getattr(controller, method)(*args, **kwargs) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._get(image_id) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] resp, body = self.http_client.get(url, headers=header) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.request(url, 'GET', **kwargs) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._handle_response(resp) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exc.from_response(resp, resp.content) [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. [ 1516.962232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1516.962232] env[69027]: DEBUG nova.compute.utils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
{{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1516.963418] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.197s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.963931] env[69027]: INFO nova.compute.claims [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1516.966509] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Build of instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 was re-scheduled: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1516.967185] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1516.967373] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1516.967533] env[69027]: DEBUG nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1516.967836] env[69027]: DEBUG nova.network.neutron [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1517.094998] env[69027]: DEBUG neutronclient.v2_0.client [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69027) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1517.096278] env[69027]: ERROR nova.compute.manager [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = getattr(controller, method)(*args, **kwargs) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._get(image_id) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] resp, body = self.http_client.get(url, headers=header) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.request(url, 'GET', **kwargs) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._handle_response(resp) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exc.from_response(resp, resp.content) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During handling of the above exception, another exception occurred: [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self.driver.spawn(context, instance, image_meta, [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._fetch_image_if_missing(context, vi) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image_fetch(context, vi, tmp_image_ds_loc) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] images.fetch_image( [ 1517.096278] env[69027]: ERROR 
nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] metadata = IMAGE_API.get(context, image_ref) [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1517.096278] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return session.show(context, image_id, [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] _reraise_translated_image_exception(image_id) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise new_exc.with_traceback(exc_trace) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = getattr(controller, method)(*args, **kwargs) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._get(image_id) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] resp, body = self.http_client.get(url, headers=header) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.request(url, 'GET', **kwargs) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1517.097232] 
env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self._handle_response(resp) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exc.from_response(resp, resp.content) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] nova.exception.ImageNotAuthorized: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During handling of the above exception, another exception occurred: [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._build_and_run_instance(context, instance, image, [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exception.RescheduledException( [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] nova.exception.RescheduledException: Build of instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 was re-scheduled: Not authorized for image 1f242793-8cbc-47db-8e09-30ca2e488bdf. 
[ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During handling of the above exception, another exception occurred: [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] exception_handler_v20(status_code, error_body) [ 1517.097232] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise client_exc(message=error_message, [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Neutron server returns request_ids: ['req-bc2a226c-5c76-43ed-8f50-bee02fec8413'] [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During handling of the above exception, another exception occurred: [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._deallocate_network(context, instance, requested_networks) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self.network_api.deallocate_for_instance( [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] data = neutron.list_ports(**search_opts) [ 
1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.list('ports', self.ports_path, retrieve_all, [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] for r in self._pagination(collection, path, **params): [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] res = self.get(path, params=params) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.retry_request("GET", action, body=body, [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.do_request(method, action, body=body, [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._handle_fault_response(status_code, replybody, resp) [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exception.Unauthorized() [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] nova.exception.Unauthorized: Not authorized. [ 1517.098476] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.178116] env[69027]: INFO nova.scheduler.client.report [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Deleted allocations for instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 [ 1517.200665] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0048072e-12c0-4db9-9d01-5bcc076ff22a tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 526.464s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.201784] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 375.126s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.201978] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During sync_power_state the instance has a pending task (spawning). Skip. 
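
Both 401 chains above end the same way: a client-level exception (glanceclient HTTPUnauthorized, neutronclient Unauthorized) is caught and re-raised as a Nova-level exception with the original traceback attached (the raise new_exc.with_traceback(exc_trace) frame in glance.py:1032), which is why each ERROR block contains "During handling of the above exception, another exception occurred". A self-contained sketch of that re-raise pattern follows; the exception classes are stand-ins, not imports from Nova or glanceclient.

import sys

class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized."""

class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized."""

def show_image(image_id):
    try:
        # Stand-in for the glanceclient GET that the service rejected with 401.
        raise HTTPUnauthorized("HTTP 401 Unauthorized")
    except HTTPUnauthorized:
        # Translate to the domain-level exception but keep the original
        # traceback, so the log shows the client frames under the new type;
        # the implicit __context__ chain yields the "During handling of the
        # above exception" lines seen above.
        _, _, exc_trace = sys.exc_info()
        new_exc = ImageNotAuthorized(f"Not authorized for image {image_id}.")
        raise new_exc.with_traceback(exc_trace)

try:
    show_image("1f242793-8cbc-47db-8e09-30ca2e488bdf")
except ImageNotAuthorized as exc:
    print(exc)
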
[ 1517.202174] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.202797] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 330.435s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.203020] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Acquiring lock "90a6375b-4834-406d-abd5-5cf47b7cfc12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.203227] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.203437] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.205303] env[69027]: INFO nova.compute.manager [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Terminating instance [ 1517.207030] env[69027]: DEBUG nova.compute.manager [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1517.207229] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1517.207491] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-290f0c0a-07e2-4867-b5fd-01a412220d39 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.219323] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19da54d-c98c-4246-944d-9abd1fadbc1d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.231687] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1356327-f7b5-4fc2-92ad-56108dd5d27d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.234434] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1517.243586] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110f1eff-ec01-4895-8ba0-cc96f21d80f2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.255167] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 90a6375b-4834-406d-abd5-5cf47b7cfc12 could not be found. [ 1517.255167] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1517.255305] env[69027]: INFO nova.compute.manager [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1517.255564] env[69027]: DEBUG oslo.service.loopingcall [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.256268] env[69027]: DEBUG nova.compute.manager [-] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1517.256412] env[69027]: DEBUG nova.network.neutron [-] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1517.288882] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433edc6b-71e1-438a-9ec3-5d2c4a45cf5c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.299907] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0faea6f2-bfe9-4268-9a71-bf5387d195c2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.304855] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.315510] env[69027]: DEBUG nova.compute.provider_tree [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.324295] env[69027]: DEBUG nova.scheduler.client.report [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1517.340560] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.340837] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1517.343292] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.039s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.345690] env[69027]: INFO nova.compute.claims [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1517.382889] env[69027]: DEBUG nova.compute.utils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1517.383757] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1517.383975] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1517.396129] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1517.404347] env[69027]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69027) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1517.404670] env[69027]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1517.405204] env[69027]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-2078d5ea-7407-4f32-824d-9e8288bbda16'] [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1517.405204] env[69027]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1517.405204] env[69027]: ERROR oslo.service.loopingcall [ 1517.406801] env[69027]: ERROR nova.compute.manager [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1517.436169] env[69027]: ERROR nova.compute.manager [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] exception_handler_v20(status_code, error_body) [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise client_exc(message=error_message, [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Neutron server returns request_ids: ['req-2078d5ea-7407-4f32-824d-9e8288bbda16'] [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] During handling of the above exception, another exception occurred: [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Traceback (most recent call last): [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._delete_instance(context, instance, bdms) [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._shutdown_instance(context, instance, bdms) [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._try_deallocate_network(context, instance, requested_networks) [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] with excutils.save_and_reraise_exception(): [ 1517.436169] env[69027]: ERROR 
nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self.force_reraise() [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise self.value [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] _deallocate_network_with_retries() [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return evt.wait() [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = hub.switch() [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.greenlet.switch() [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = func(*self.args, **self.kw) [ 1517.436169] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] result = f(*args, **kwargs) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._deallocate_network( [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self.network_api.deallocate_for_instance( [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 
90a6375b-4834-406d-abd5-5cf47b7cfc12] data = neutron.list_ports(**search_opts) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.list('ports', self.ports_path, retrieve_all, [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] for r in self._pagination(collection, path, **params): [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] res = self.get(path, params=params) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.retry_request("GET", action, body=body, [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] return self.do_request(method, action, body=body, [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] ret = obj(*args, **kwargs) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] self._handle_fault_response(status_code, replybody, resp) [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1517.437218] env[69027]: ERROR nova.compute.manager [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] [ 1517.461250] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1517.467836] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Lock "90a6375b-4834-406d-abd5-5cf47b7cfc12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.265s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.474988] env[69027]: DEBUG nova.policy [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3415773728cf444c9717c5e94fe5cfe7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9960f5a0bb3545f498c27eb27ec38d19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1517.492180] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: 
False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1517.492282] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1517.492434] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1517.492617] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1517.492765] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1517.492915] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1517.493318] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1517.493474] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1517.493650] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1517.493817] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1517.493988] env[69027]: DEBUG nova.virt.hardware [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1517.494904] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749104df-9776-4401-8ce0-3c84ad4b2148 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.513053] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c5d928d-f5d1-44ed-8173-90017f6be21b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.536215] env[69027]: INFO nova.compute.manager [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] [instance: 90a6375b-4834-406d-abd5-5cf47b7cfc12] Successfully reverted task state from None on failure for instance. [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server [None req-d0f83a79-73c8-4dfe-b59e-25f037d28e71 tempest-MigrationsAdminTest-1790921170 tempest-MigrationsAdminTest-1790921170-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-2078d5ea-7407-4f32-824d-9e8288bbda16'] [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server 
do_terminate_instance(instance, bdms) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1517.541999] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server raise self.value [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1517.543549] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 
1517.543549] env[69027]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1517.545626] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1517.545626] env[69027]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1517.545626] env[69027]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1517.545626] env[69027]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1517.545626] env[69027]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1517.545626] env[69027]: ERROR oslo_messaging.rpc.server [ 1517.606399] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a160f46b-b59d-4a93-b59a-4fff090ea560 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.616062] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf15ef31-4e47-412c-93a6-6164d0850014 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.645730] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d8dff6-b5b8-4f58-bb13-578cf02043a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.652920] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cbeb3f-c6a9-4c0e-a33f-960ef7cfde29 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.666044] env[69027]: DEBUG nova.compute.provider_tree [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1517.674966] env[69027]: DEBUG nova.scheduler.client.report [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1517.690567] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.347s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.691141] env[69027]: DEBUG nova.compute.manager [None 
req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1517.745906] env[69027]: DEBUG nova.compute.utils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1517.747688] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1517.747918] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1517.756349] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1517.839715] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 
tempest-ServersTestJSON-2145480274-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1517.871376] env[69027]: DEBUG nova.virt.hardware [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1517.871876] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585e5238-64f7-4d87-b316-ea98a05cb93d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.875938] env[69027]: DEBUG nova.policy [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d171178bb9345559b88d0918e33a270', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f56d438f5aa54204b72f10db549d0535', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1517.884720] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a87f689-d591-4414-83e1-44e381f5bc9b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.893873] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Successfully created port: 180f58b2-4198-4792-92d7-d83f5da74f63 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1518.713849] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Successfully created port: 5e32e54b-adf5-427f-9c32-2da5fd4462fa {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1519.232941] env[69027]: DEBUG nova.compute.manager [req-3c1447f7-002b-45a2-bd4a-583f8324d097 req-01f3764f-e44e-4697-b2fb-2e37c59aafdd service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Received event network-vif-plugged-180f58b2-4198-4792-92d7-d83f5da74f63 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1519.233251] env[69027]: DEBUG oslo_concurrency.lockutils [req-3c1447f7-002b-45a2-bd4a-583f8324d097 req-01f3764f-e44e-4697-b2fb-2e37c59aafdd service nova] Acquiring lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.233465] env[69027]: DEBUG oslo_concurrency.lockutils [req-3c1447f7-002b-45a2-bd4a-583f8324d097 req-01f3764f-e44e-4697-b2fb-2e37c59aafdd service nova] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.233631] env[69027]: DEBUG oslo_concurrency.lockutils [req-3c1447f7-002b-45a2-bd4a-583f8324d097 req-01f3764f-e44e-4697-b2fb-2e37c59aafdd service nova] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.233801] env[69027]: DEBUG nova.compute.manager [req-3c1447f7-002b-45a2-bd4a-583f8324d097 req-01f3764f-e44e-4697-b2fb-2e37c59aafdd service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] No waiting events found dispatching network-vif-plugged-180f58b2-4198-4792-92d7-d83f5da74f63 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1519.233964] env[69027]: WARNING nova.compute.manager [req-3c1447f7-002b-45a2-bd4a-583f8324d097 req-01f3764f-e44e-4697-b2fb-2e37c59aafdd service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Received unexpected event network-vif-plugged-180f58b2-4198-4792-92d7-d83f5da74f63 for instance with vm_state building and task_state spawning. [ 1519.358580] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Successfully updated port: 180f58b2-4198-4792-92d7-d83f5da74f63 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1519.368229] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "refresh_cache-4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.368374] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquired lock "refresh_cache-4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.368521] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1519.432579] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 
4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1519.744175] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Updating instance_info_cache with network_info: [{"id": "180f58b2-4198-4792-92d7-d83f5da74f63", "address": "fa:16:3e:4f:2f:99", "network": {"id": "096b417c-7a59-404d-9fd6-2654f20b25d7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1996572259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9960f5a0bb3545f498c27eb27ec38d19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap180f58b2-41", "ovs_interfaceid": "180f58b2-4198-4792-92d7-d83f5da74f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.754839] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Releasing lock "refresh_cache-4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.755147] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Instance network_info: |[{"id": "180f58b2-4198-4792-92d7-d83f5da74f63", "address": "fa:16:3e:4f:2f:99", "network": {"id": "096b417c-7a59-404d-9fd6-2654f20b25d7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1996572259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9960f5a0bb3545f498c27eb27ec38d19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap180f58b2-41", "ovs_interfaceid": "180f58b2-4198-4792-92d7-d83f5da74f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1519.755556] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:2f:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '180f58b2-4198-4792-92d7-d83f5da74f63', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1519.763096] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Creating folder: Project (9960f5a0bb3545f498c27eb27ec38d19). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1519.763651] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d1ed75a-e6c2-489d-98ec-764053d9470e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.775554] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Created folder: Project (9960f5a0bb3545f498c27eb27ec38d19) in parent group-v677321. [ 1519.775742] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Creating folder: Instances. Parent ref: group-v677403. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1519.775966] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1bfb56e-7ad5-4c6b-98d8-5de5442cffe6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.784211] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Created folder: Instances in parent group-v677403. [ 1519.784767] env[69027]: DEBUG oslo.service.loopingcall [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.785032] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1519.785241] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86bc11e4-3e18-4559-802b-682a839d47cb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.804620] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1519.804620] env[69027]: value = "task-3395219" [ 1519.804620] env[69027]: _type = "Task" [ 1519.804620] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.808146] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Successfully updated port: 5e32e54b-adf5-427f-9c32-2da5fd4462fa {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1519.816034] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395219, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.823643] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "refresh_cache-d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.823643] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquired lock "refresh_cache-d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.823643] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1519.861456] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1520.029390] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Updating instance_info_cache with network_info: [{"id": "5e32e54b-adf5-427f-9c32-2da5fd4462fa", "address": "fa:16:3e:d6:b5:74", "network": {"id": "1dfafb29-86e4-4fb1-b0e1-46a729a6f807", "bridge": "br-int", "label": "tempest-ServersTestJSON-88689892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f56d438f5aa54204b72f10db549d0535", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e32e54b-ad", "ovs_interfaceid": "5e32e54b-adf5-427f-9c32-2da5fd4462fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.041773] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Releasing lock "refresh_cache-d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.042130] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Instance network_info: |[{"id": "5e32e54b-adf5-427f-9c32-2da5fd4462fa", "address": "fa:16:3e:d6:b5:74", "network": {"id": "1dfafb29-86e4-4fb1-b0e1-46a729a6f807", "bridge": "br-int", "label": "tempest-ServersTestJSON-88689892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f56d438f5aa54204b72f10db549d0535", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e32e54b-ad", "ovs_interfaceid": "5e32e54b-adf5-427f-9c32-2da5fd4462fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1520.042538] env[69027]: 
DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:b5:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e32e54b-adf5-427f-9c32-2da5fd4462fa', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1520.049968] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Creating folder: Project (f56d438f5aa54204b72f10db549d0535). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1520.050640] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-760f63a8-efdd-499a-95af-b7b926775c21 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.060972] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Created folder: Project (f56d438f5aa54204b72f10db549d0535) in parent group-v677321. [ 1520.061191] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Creating folder: Instances. Parent ref: group-v677406. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1520.061416] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b61976f6-f7f8-433c-89ce-5977ce446c93 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.070220] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Created folder: Instances in parent group-v677406. [ 1520.070437] env[69027]: DEBUG oslo.service.loopingcall [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1520.070615] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1520.070802] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d63b5dc4-a575-4a81-b6fe-38fdc9db4f0c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.089518] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1520.089518] env[69027]: value = "task-3395222" [ 1520.089518] env[69027]: _type = "Task" [ 1520.089518] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.096764] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395222, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.315063] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395219, 'name': CreateVM_Task, 'duration_secs': 0.293537} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.315063] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1520.315694] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.315866] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.316210] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1520.316459] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddfbeaf8-8dae-40d2-abec-6289c9cd441a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.321094] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Waiting for the task: (returnval){ [ 1520.321094] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]525fd3e9-80cc-183a-73f0-35b605933a67" [ 1520.321094] env[69027]: _type = "Task" [ 1520.321094] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.328892] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]525fd3e9-80cc-183a-73f0-35b605933a67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.599085] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395222, 'name': CreateVM_Task, 'duration_secs': 0.275101} completed successfully. 
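
The CreateVM_Task and SearchDatastore_Task entries above show the usual pattern: a vCenter task is submitted, then polled ("progress is 0%") until it reports completion (task-3395219 finished in about 0.29 s). A generic, hypothetical polling loop in the same spirit; poll_task, its returned attributes, and TaskFailed are illustrative stand-ins, not the oslo.vmware API:

    import time

    class TaskFailed(Exception):
        """Illustrative stand-in for a task-level fault."""

    def wait_for_task(poll_task, task_id, interval=0.5, timeout=300):
        # Poll a long-running backend task until it succeeds or errors out.
        # poll_task(task_id) is assumed to return an object with .state,
        # .progress and .error attributes, mirroring the progress/completion
        # entries in the surrounding log.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_task(task_id)
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            time.sleep(interval)
        raise TimeoutError(f"task {task_id} did not complete in {timeout}s")
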
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.599323] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1520.600026] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.833183] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.833464] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1520.833677] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.833912] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.834262] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1520.834533] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-198e7287-5c75-4a23-b940-bd4d56b48d79 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.839058] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Waiting for the task: (returnval){ [ 1520.839058] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]523a31b1-f54e-bf0b-40c6-c4d969845ef3" [ 1520.839058] env[69027]: _type = "Task" [ 1520.839058] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.847160] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]523a31b1-f54e-bf0b-40c6-c4d969845ef3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.266440] env[69027]: DEBUG nova.compute.manager [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Received event network-changed-180f58b2-4198-4792-92d7-d83f5da74f63 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1521.266440] env[69027]: DEBUG nova.compute.manager [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Refreshing instance network info cache due to event network-changed-180f58b2-4198-4792-92d7-d83f5da74f63. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1521.266750] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Acquiring lock "refresh_cache-4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.266750] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Acquired lock "refresh_cache-4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.266891] env[69027]: DEBUG nova.network.neutron [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Refreshing network info cache for port 180f58b2-4198-4792-92d7-d83f5da74f63 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1521.349751] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.350039] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1521.350257] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.799010] env[69027]: DEBUG 
oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.799257] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.872879] env[69027]: DEBUG nova.network.neutron [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Updated VIF entry in instance network info cache for port 180f58b2-4198-4792-92d7-d83f5da74f63. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1521.873265] env[69027]: DEBUG nova.network.neutron [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Updating instance_info_cache with network_info: [{"id": "180f58b2-4198-4792-92d7-d83f5da74f63", "address": "fa:16:3e:4f:2f:99", "network": {"id": "096b417c-7a59-404d-9fd6-2654f20b25d7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1996572259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9960f5a0bb3545f498c27eb27ec38d19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap180f58b2-41", "ovs_interfaceid": "180f58b2-4198-4792-92d7-d83f5da74f63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.882827] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Releasing lock "refresh_cache-4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.883696] env[69027]: DEBUG nova.compute.manager [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Received event network-vif-plugged-5e32e54b-adf5-427f-9c32-2da5fd4462fa {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1521.883696] env[69027]: DEBUG oslo_concurrency.lockutils 
[req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Acquiring lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.883696] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.883696] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.883851] env[69027]: DEBUG nova.compute.manager [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] No waiting events found dispatching network-vif-plugged-5e32e54b-adf5-427f-9c32-2da5fd4462fa {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1521.884146] env[69027]: WARNING nova.compute.manager [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Received unexpected event network-vif-plugged-5e32e54b-adf5-427f-9c32-2da5fd4462fa for instance with vm_state building and task_state spawning. [ 1521.884355] env[69027]: DEBUG nova.compute.manager [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Received event network-changed-5e32e54b-adf5-427f-9c32-2da5fd4462fa {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1521.884521] env[69027]: DEBUG nova.compute.manager [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Refreshing instance network info cache due to event network-changed-5e32e54b-adf5-427f-9c32-2da5fd4462fa. 
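
The "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets around refresh_cache-<instance-uuid> above come from oslo.concurrency and serialise cache refreshes per instance. A minimal sketch of that pattern, assuming oslo.concurrency is available; the lock name format copies the log and refresh_fn is a hypothetical callable:

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, refresh_fn):
        # Roughly the pattern visible in the log: hold a per-instance lock so
        # only one worker rebuilds that instance's network info cache at a time.
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            return refresh_fn(instance_uuid)
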
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1521.884735] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Acquiring lock "refresh_cache-d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.884894] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Acquired lock "refresh_cache-d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.885176] env[69027]: DEBUG nova.network.neutron [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Refreshing network info cache for port 5e32e54b-adf5-427f-9c32-2da5fd4462fa {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1522.166015] env[69027]: DEBUG nova.network.neutron [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Updated VIF entry in instance network info cache for port 5e32e54b-adf5-427f-9c32-2da5fd4462fa. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1522.166391] env[69027]: DEBUG nova.network.neutron [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Updating instance_info_cache with network_info: [{"id": "5e32e54b-adf5-427f-9c32-2da5fd4462fa", "address": "fa:16:3e:d6:b5:74", "network": {"id": "1dfafb29-86e4-4fb1-b0e1-46a729a6f807", "bridge": "br-int", "label": "tempest-ServersTestJSON-88689892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f56d438f5aa54204b72f10db549d0535", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e32e54b-ad", "ovs_interfaceid": "5e32e54b-adf5-427f-9c32-2da5fd4462fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.176749] env[69027]: DEBUG oslo_concurrency.lockutils [req-677f42c6-49d4-4054-90d8-18a74eb1e5b7 req-18316205-c031-4a6c-8089-1a210bca7afe service nova] Releasing lock "refresh_cache-d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.771122] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1560.782605] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.782826] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.782991] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.783206] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1560.784359] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5017f72e-6a6d-44a0-98a6-10f8d9cf3f9d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.793113] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7e7b39-0067-4848-ad09-25c5d75c9988 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.808224] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f059da-a20a-4e79-8a78-962a6bf0f620 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.814392] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c250bd-5422-4687-b33c-250ae406ca22 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.842542] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180954MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1560.842681] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.842870] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.912101] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a907f1ab-3540-4bc0-8389-005233cca940 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.912270] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1715faa2-86ea-49f9-a993-1003aea54384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.912399] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.912520] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.912638] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.912756] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.912871] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.912989] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.913115] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.913230] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1560.924105] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1560.934526] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1560.943831] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ed6243ed-cd06-46cd-b592-f05d5cd83139 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1560.954163] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
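
The audit above lists ten instances, each holding a placement allocation of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}. Summing those allocations, plus the 512 MB of reserved host memory reported in the inventory a few entries below, reproduces the used_ram=1792MB, used_disk=10GB and used_vcpus=10 figures in the final resource view. A small worked check, with all values copied from the surrounding entries:

    # Worked check of the resource audit using the numbers from this log.
    allocations = [{"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}] * 10  # ten instances

    used_vcpus = sum(a["VCPU"] for a in allocations)          # -> 10
    used_disk_gb = sum(a["DISK_GB"] for a in allocations)     # -> 10
    reserved_ram_mb = 512                                     # reserved MEMORY_MB from the inventory
    used_ram_mb = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)  # -> 1792

    assert (used_vcpus, used_disk_gb, used_ram_mb) == (10, 10, 1792)
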
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1560.954367] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1560.954518] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1561.106207] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d86c80b-f77f-40f8-93a2-256a5bdae893 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.113319] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc2b861-0f26-46ae-8067-3eea509c9e8b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.156671] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc51ad6-b477-4b4d-92e6-33c2d8615e1b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.165707] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88053433-d95d-4470-816a-49c40a1e024b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.182014] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.190574] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1561.203322] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1561.203497] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.361s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.200013] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.200332] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.200409] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1563.200530] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1563.213395] env[69027]: WARNING oslo_vmware.rw_handles [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1563.213395] env[69027]: ERROR oslo_vmware.rw_handles [ 1563.213776] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1563.215986] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1563.216274] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 
tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Copying Virtual Disk [datastore2] vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/398b0c1f-658e-4109-8170-7db91c94eec8/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1563.216557] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-431f3a0e-2794-4de6-a847-4df95bf9d187 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.222424] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.222574] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.222706] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.222862] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.223030] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.223190] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.223326] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.223449] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.223569] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.223686] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1563.223802] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1563.225454] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.225721] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 1563.225721] env[69027]: value = "task-3395223" [ 1563.225721] env[69027]: _type = "Task" [ 1563.225721] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.234468] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': task-3395223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.736246] env[69027]: DEBUG oslo_vmware.exceptions [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Fault InvalidArgument not matched. 
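
The fault-classification entry above and the traceback that follows show the InvalidArgument fault from CopyVirtualDisk_Task surfacing as oslo_vmware.exceptions.VimFaultException, which then aborts the spawn and triggers instance cleanup. A minimal sketch of catching that exception around a task wait; it assumes a session object exposing wait_for_task and is illustrative only, not the driver's actual error path:

    from oslo_vmware import exceptions as vexc

    def copy_disk_and_handle_faults(session, copy_task):
        # Illustrative only: wait on a vCenter task and turn a VIM fault
        # (e.g. InvalidArgument, as seen in the log) into an explicit
        # cleanup path instead of letting it escape unhandled.
        try:
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as err:
            # The log shows Faults: ['InvalidArgument']; fault_list is assumed
            # to carry those names, hence the defensive getattr.
            if "InvalidArgument" in getattr(err, "fault_list", []):
                raise RuntimeError(f"disk copy rejected by vCenter: {err}") from err
            raise
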
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1563.736531] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.737105] env[69027]: ERROR nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1563.737105] env[69027]: Faults: ['InvalidArgument'] [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] Traceback (most recent call last): [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] yield resources [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self.driver.spawn(context, instance, image_meta, [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self._fetch_image_if_missing(context, vi) [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] image_cache(vi, tmp_image_ds_loc) [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] vm_util.copy_virtual_disk( [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] session._wait_for_task(vmdk_copy_task) [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] return self.wait_for_task(task_ref) [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] return evt.wait() [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] result = hub.switch() [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] return self.greenlet.switch() [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self.f(*self.args, **self.kw) [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] raise exceptions.translate_fault(task_info.error) [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] Faults: ['InvalidArgument'] [ 1563.737105] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] [ 1563.738095] env[69027]: INFO nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Terminating instance [ 1563.738932] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.739166] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1563.739405] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1ec0f2c-fded-4c6c-b040-89d1f129a629 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.741510] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1563.741710] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1563.742668] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310b4d80-c730-4128-a002-71690365cdb1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.749107] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1563.749312] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc4a3512-3b6d-4611-a328-f165801e4a92 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.751321] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1563.751498] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1563.752401] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc526741-c244-4080-aec9-5159c299c436 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.756735] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Waiting for the task: (returnval){ [ 1563.756735] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52384681-568b-4450-e79e-fb90be7d0e9f" [ 1563.756735] env[69027]: _type = "Task" [ 1563.756735] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.763446] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52384681-568b-4450-e79e-fb90be7d0e9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.770881] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1563.816291] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1563.816512] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1563.816672] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Deleting the datastore file [datastore2] a907f1ab-3540-4bc0-8389-005233cca940 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1563.816940] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14c2f23b-27db-41d8-ac25-3030ed1eab64 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.823307] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for the task: (returnval){ [ 1563.823307] env[69027]: value = "task-3395225" [ 1563.823307] env[69027]: _type = "Task" [ 1563.823307] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.830572] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': task-3395225, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.266872] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1564.269186] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Creating directory with path [datastore2] vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1564.269186] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97c6eac4-a449-48a9-8b72-11a66e6c30ff {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.279778] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Created directory with path [datastore2] vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1564.280159] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Fetch image to [datastore2] vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1564.280281] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1564.281838] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751256f5-957b-4edb-9c77-b0d27d18c846 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.288658] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4069d19-d158-484c-8271-030aa397c2b6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.298618] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d90ca4e-5e2b-4b0e-b4f0-7c580ad3f518 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.337195] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e709df5-0678-4c50-bd3f-5b124429b3b7 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.344910] env[69027]: DEBUG oslo_vmware.api [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Task: {'id': task-3395225, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.06665} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.346359] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1564.346529] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1564.346865] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1564.347010] env[69027]: INFO nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1564.348866] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-49df259a-15f7-405f-bf75-b33eb85ed665 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.350823] env[69027]: DEBUG nova.compute.claims [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1564.350992] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.351227] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.377872] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1564.509438] env[69027]: DEBUG oslo_vmware.rw_handles [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1564.569985] env[69027]: DEBUG oslo_vmware.rw_handles [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1564.570197] env[69027]: DEBUG oslo_vmware.rw_handles [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1564.608732] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a000e40e-78da-4480-a7db-502161f6cbe0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.616255] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e1bf81-e397-49c0-b8bf-cd94c076842b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.647354] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65847ab1-14b1-418b-801c-1d9992702e95 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.653777] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073fa9fb-adbc-44ec-9b6e-07f11335cfc7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.666397] env[69027]: DEBUG nova.compute.provider_tree [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.674691] env[69027]: DEBUG nova.scheduler.client.report [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1564.689188] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.338s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.689694] env[69027]: ERROR nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1564.689694] env[69027]: Faults: ['InvalidArgument'] [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] Traceback (most recent call last): [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1564.689694] 
env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self.driver.spawn(context, instance, image_meta, [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self._fetch_image_if_missing(context, vi) [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] image_cache(vi, tmp_image_ds_loc) [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] vm_util.copy_virtual_disk( [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] session._wait_for_task(vmdk_copy_task) [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] return self.wait_for_task(task_ref) [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] return evt.wait() [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] result = hub.switch() [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] return self.greenlet.switch() [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] self.f(*self.args, **self.kw) [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] raise exceptions.translate_fault(task_info.error) [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] Faults: ['InvalidArgument'] [ 1564.689694] env[69027]: ERROR nova.compute.manager [instance: a907f1ab-3540-4bc0-8389-005233cca940] [ 1564.690508] env[69027]: DEBUG nova.compute.utils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1564.691730] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Build of instance a907f1ab-3540-4bc0-8389-005233cca940 was re-scheduled: A specified parameter was not correct: fileType [ 1564.691730] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1564.692115] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1564.692296] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1564.692479] env[69027]: DEBUG nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1564.692646] env[69027]: DEBUG nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1565.017655] env[69027]: DEBUG nova.network.neutron [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.029985] env[69027]: INFO nova.compute.manager [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Took 0.34 seconds to deallocate network for instance. [ 1565.142079] env[69027]: INFO nova.scheduler.client.report [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Deleted allocations for instance a907f1ab-3540-4bc0-8389-005233cca940 [ 1565.176037] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ab1f4641-abfe-4b3c-9d98-66978d039277 tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "a907f1ab-3540-4bc0-8389-005233cca940" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 569.214s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.177032] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "a907f1ab-3540-4bc0-8389-005233cca940" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 373.449s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.178418] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Acquiring lock "a907f1ab-3540-4bc0-8389-005233cca940-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.178418] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "a907f1ab-3540-4bc0-8389-005233cca940-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.178418] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "a907f1ab-3540-4bc0-8389-005233cca940-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.180966] env[69027]: INFO nova.compute.manager [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Terminating instance [ 1565.183132] env[69027]: DEBUG nova.compute.manager [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1565.183303] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1565.183819] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-306b6844-0bd4-4f90-be85-a33b8d9125ff {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.195406] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c936dd-60e3-47dc-8822-6fa226c4d9b7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.210825] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1565.234114] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a907f1ab-3540-4bc0-8389-005233cca940 could not be found. 
[ 1565.234114] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1565.234363] env[69027]: INFO nova.compute.manager [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1565.234529] env[69027]: DEBUG oslo.service.loopingcall [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1565.234741] env[69027]: DEBUG nova.compute.manager [-] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1565.234842] env[69027]: DEBUG nova.network.neutron [-] [instance: a907f1ab-3540-4bc0-8389-005233cca940] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1565.268848] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.269411] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.270810] env[69027]: INFO nova.compute.claims [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1565.275528] env[69027]: DEBUG nova.network.neutron [-] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.283759] env[69027]: INFO nova.compute.manager [-] [instance: a907f1ab-3540-4bc0-8389-005233cca940] Took 0.05 seconds to deallocate network for instance. 
[ 1565.383138] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f61806b6-05c7-47bb-8a71-8532b4a5e54e tempest-VolumesAdminNegativeTest-1413569638 tempest-VolumesAdminNegativeTest-1413569638-project-member] Lock "a907f1ab-3540-4bc0-8389-005233cca940" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.206s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.481273] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28a5223-3c5d-42f7-b9d3-ecbfdb9fe219 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.488783] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecc213f-0b09-4397-b3a9-8fa7e5c79ee0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.518416] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687739d3-0063-44fb-9fff-9c2970a49d19 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.525835] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b5ef47-43eb-47fd-bd27-62faf8392a56 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.538206] env[69027]: DEBUG nova.compute.provider_tree [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1565.546590] env[69027]: DEBUG nova.scheduler.client.report [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1565.560149] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.560603] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1565.590718] env[69027]: DEBUG nova.compute.utils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.591837] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1565.592014] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1565.600480] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1565.650952] env[69027]: DEBUG nova.policy [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58e66320388b4e8294205232eec8cfaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947495558dff46eb9951fadfc3d12d32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1565.666264] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1565.692489] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1565.692994] env[69027]: DEBUG nova.virt.hardware [None 
req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1565.692994] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1565.692994] env[69027]: DEBUG nova.virt.hardware [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1565.693796] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cada9500-9066-4496-a294-8f2e7b54fe00 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.701303] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268b9005-8720-49a7-845b-be0620dee88d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.771372] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.771557] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.771693] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1566.150118] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Successfully created port: 6ad24cba-4603-40c8-984d-fde1ebd7cc77 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.770846] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.877847] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Successfully updated port: 6ad24cba-4603-40c8-984d-fde1ebd7cc77 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1566.893498] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "refresh_cache-1d3442ae-f46f-433d-bccb-f323463e3a21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.893657] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "refresh_cache-1d3442ae-f46f-433d-bccb-f323463e3a21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.893808] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1566.930977] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1567.062822] env[69027]: DEBUG nova.compute.manager [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Received event network-vif-plugged-6ad24cba-4603-40c8-984d-fde1ebd7cc77 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1567.063193] env[69027]: DEBUG oslo_concurrency.lockutils [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] Acquiring lock "1d3442ae-f46f-433d-bccb-f323463e3a21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.064025] env[69027]: DEBUG oslo_concurrency.lockutils [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.064025] env[69027]: DEBUG oslo_concurrency.lockutils [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.064025] env[69027]: DEBUG nova.compute.manager [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] No waiting events found dispatching network-vif-plugged-6ad24cba-4603-40c8-984d-fde1ebd7cc77 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1567.064208] env[69027]: WARNING nova.compute.manager [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Received unexpected event network-vif-plugged-6ad24cba-4603-40c8-984d-fde1ebd7cc77 for instance with vm_state building and task_state spawning. [ 1567.064544] env[69027]: DEBUG nova.compute.manager [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Received event network-changed-6ad24cba-4603-40c8-984d-fde1ebd7cc77 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1567.064847] env[69027]: DEBUG nova.compute.manager [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Refreshing instance network info cache due to event network-changed-6ad24cba-4603-40c8-984d-fde1ebd7cc77. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1567.066060] env[69027]: DEBUG oslo_concurrency.lockutils [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] Acquiring lock "refresh_cache-1d3442ae-f46f-433d-bccb-f323463e3a21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.102240] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Updating instance_info_cache with network_info: [{"id": "6ad24cba-4603-40c8-984d-fde1ebd7cc77", "address": "fa:16:3e:46:19:13", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ad24cba-46", "ovs_interfaceid": "6ad24cba-4603-40c8-984d-fde1ebd7cc77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.117049] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "refresh_cache-1d3442ae-f46f-433d-bccb-f323463e3a21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.117049] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Instance network_info: |[{"id": "6ad24cba-4603-40c8-984d-fde1ebd7cc77", "address": "fa:16:3e:46:19:13", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap6ad24cba-46", "ovs_interfaceid": "6ad24cba-4603-40c8-984d-fde1ebd7cc77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1567.117275] env[69027]: DEBUG oslo_concurrency.lockutils [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] Acquired lock "refresh_cache-1d3442ae-f46f-433d-bccb-f323463e3a21" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.117384] env[69027]: DEBUG nova.network.neutron [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Refreshing network info cache for port 6ad24cba-4603-40c8-984d-fde1ebd7cc77 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1567.118442] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:19:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ad24cba-4603-40c8-984d-fde1ebd7cc77', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1567.125780] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating folder: Project (947495558dff46eb9951fadfc3d12d32). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1567.128537] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9d92aaa-5e3e-4795-a5cc-2c2c20dcf879 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.139466] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Created folder: Project (947495558dff46eb9951fadfc3d12d32) in parent group-v677321. [ 1567.139662] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating folder: Instances. Parent ref: group-v677409. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1567.139884] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d4db6fc-d34a-41ef-8aa5-d3d173bf8ee9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.148035] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Created folder: Instances in parent group-v677409. 
[ 1567.148291] env[69027]: DEBUG oslo.service.loopingcall [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1567.148469] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1567.148705] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-965f57c1-d0a3-4cc9-a504-381398992a19 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.169262] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1567.169262] env[69027]: value = "task-3395228" [ 1567.169262] env[69027]: _type = "Task" [ 1567.169262] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.176175] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395228, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.399803] env[69027]: DEBUG nova.network.neutron [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Updated VIF entry in instance network info cache for port 6ad24cba-4603-40c8-984d-fde1ebd7cc77. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1567.400190] env[69027]: DEBUG nova.network.neutron [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Updating instance_info_cache with network_info: [{"id": "6ad24cba-4603-40c8-984d-fde1ebd7cc77", "address": "fa:16:3e:46:19:13", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ad24cba-46", "ovs_interfaceid": "6ad24cba-4603-40c8-984d-fde1ebd7cc77", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.411373] env[69027]: DEBUG oslo_concurrency.lockutils [req-667e5850-9530-4326-85fc-0fc42840e36e req-5602c0c2-d715-459e-9a7b-4c2a8afef7ec service nova] Releasing lock "refresh_cache-1d3442ae-f46f-433d-bccb-f323463e3a21" {{(pid=69027) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.681033] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395228, 'name': CreateVM_Task, 'duration_secs': 0.30088} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.681033] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1567.681033] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.681303] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.681428] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1567.681764] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-089772bc-ee8b-43b1-8f5e-71e72b83aae3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.686314] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 1567.686314] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5252d90b-ba28-a68d-5002-7069589a05d9" [ 1567.686314] env[69027]: _type = "Task" [ 1567.686314] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.693753] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5252d90b-ba28-a68d-5002-7069589a05d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.196496] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.196810] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1568.196942] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.771793] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1583.146372] env[69027]: DEBUG oslo_concurrency.lockutils [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.210179] env[69027]: DEBUG oslo_concurrency.lockutils [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.326028] env[69027]: WARNING oslo_vmware.rw_handles [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1614.326028] env[69027]: ERROR 
oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1614.326028] env[69027]: ERROR oslo_vmware.rw_handles [ 1614.326673] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1614.328597] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1614.328867] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Copying Virtual Disk [datastore2] vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/c51473d7-8343-47e2-bc5d-ece1bdb1943d/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1614.329149] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29e1f867-2f45-4cc8-b02c-8cd84522b472 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.337094] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Waiting for the task: (returnval){ [ 1614.337094] env[69027]: value = "task-3395229" [ 1614.337094] env[69027]: _type = "Task" [ 1614.337094] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.345223] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Task: {'id': task-3395229, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.847217] env[69027]: DEBUG oslo_vmware.exceptions [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1614.847521] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.848119] env[69027]: ERROR nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1614.848119] env[69027]: Faults: ['InvalidArgument'] [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Traceback (most recent call last): [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] yield resources [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self.driver.spawn(context, instance, image_meta, [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self._fetch_image_if_missing(context, vi) [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] image_cache(vi, tmp_image_ds_loc) [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] vm_util.copy_virtual_disk( [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] session._wait_for_task(vmdk_copy_task) [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] return self.wait_for_task(task_ref) [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] return evt.wait() [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] result = hub.switch() [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] return self.greenlet.switch() [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self.f(*self.args, **self.kw) [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] raise exceptions.translate_fault(task_info.error) [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Faults: ['InvalidArgument'] [ 1614.848119] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] [ 1614.849075] env[69027]: INFO nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Terminating instance [ 1614.849995] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.850220] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1614.850450] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f9263ac-e2c7-43d6-8125-f53645776041 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.852532] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1614.852721] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1614.853442] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1db48e-567d-4bcf-baca-9322d58f3ea9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.860202] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1614.860407] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c23eaa02-456c-4e49-a1b9-572acfce1d9a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.862436] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1614.862606] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1614.863561] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e521959-bbd8-460b-9664-528ea0e4f3dd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.868093] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Waiting for the task: (returnval){ [ 1614.868093] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5205ca09-0724-f44d-1ab1-9dbe2ecd08e7" [ 1614.868093] env[69027]: _type = "Task" [ 1614.868093] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.875179] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5205ca09-0724-f44d-1ab1-9dbe2ecd08e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.937401] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1614.937684] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1614.937907] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Deleting the datastore file [datastore2] 1715faa2-86ea-49f9-a993-1003aea54384 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1614.938178] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f16a6a5e-5b70-401d-8639-fff96faf9b5d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.943912] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Waiting for the task: (returnval){ [ 1614.943912] env[69027]: value = "task-3395231" [ 1614.943912] env[69027]: _type = "Task" [ 1614.943912] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.951499] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Task: {'id': task-3395231, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.378556] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1615.378845] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Creating directory with path [datastore2] vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1615.379090] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8110dae0-613b-43c9-bb81-f5999b065d06 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.391424] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Created directory with path [datastore2] vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1615.391607] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Fetch image to [datastore2] vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1615.391791] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1615.392514] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e202bc-7aa1-4a28-ad09-ca1686d71a44 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.399019] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dfa7cc-7cc9-4de0-99b6-066911be8014 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.407994] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33d6d1e-f901-4175-bec2-f166d98c6a44 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.437864] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9728e953-f037-4e39-bd5d-a6676615a12b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.443614] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-23767743-1a83-4056-becd-d40aeb389455 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.452662] env[69027]: DEBUG oslo_vmware.api [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Task: {'id': task-3395231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066342} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.452901] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1615.453092] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1615.453266] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1615.453437] env[69027]: INFO nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1615.455756] env[69027]: DEBUG nova.compute.claims [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1615.455756] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.457218] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.467191] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1615.522460] env[69027]: DEBUG oslo_vmware.rw_handles [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1615.583117] env[69027]: DEBUG oslo_vmware.rw_handles [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1615.584096] env[69027]: DEBUG oslo_vmware.rw_handles [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1615.701263] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b51cb7-ea00-48a7-9751-665c6fb77b73 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.708916] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ce6bca-78ae-4d5a-9ba5-34b36fed30e0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.739267] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d155ec74-e3e4-413a-8b75-577f012a55f9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.745970] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00af398-6aec-4174-a272-c10d7cf45059 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.758659] env[69027]: DEBUG nova.compute.provider_tree [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.766846] env[69027]: DEBUG nova.scheduler.client.report [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1615.780417] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.781030] env[69027]: ERROR nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1615.781030] env[69027]: Faults: ['InvalidArgument'] [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Traceback (most recent call last): [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1615.781030] env[69027]: ERROR 
nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self.driver.spawn(context, instance, image_meta, [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self._fetch_image_if_missing(context, vi) [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] image_cache(vi, tmp_image_ds_loc) [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] vm_util.copy_virtual_disk( [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] session._wait_for_task(vmdk_copy_task) [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] return self.wait_for_task(task_ref) [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] return evt.wait() [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] result = hub.switch() [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] return self.greenlet.switch() [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] self.f(*self.args, **self.kw) [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] raise exceptions.translate_fault(task_info.error) [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Faults: ['InvalidArgument'] [ 1615.781030] env[69027]: ERROR nova.compute.manager [instance: 1715faa2-86ea-49f9-a993-1003aea54384] [ 1615.781966] env[69027]: DEBUG nova.compute.utils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1615.783061] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Build of instance 1715faa2-86ea-49f9-a993-1003aea54384 was re-scheduled: A specified parameter was not correct: fileType [ 1615.783061] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1615.783433] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1615.783608] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1615.783797] env[69027]: DEBUG nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1615.783973] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1616.072026] env[69027]: DEBUG nova.network.neutron [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.085664] env[69027]: INFO nova.compute.manager [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Took 0.30 seconds to deallocate network for instance. [ 1616.188860] env[69027]: INFO nova.scheduler.client.report [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Deleted allocations for instance 1715faa2-86ea-49f9-a993-1003aea54384 [ 1616.214872] env[69027]: DEBUG oslo_concurrency.lockutils [None req-eab2eb28-c2d5-4855-9793-f37a0065b6ea tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "1715faa2-86ea-49f9-a993-1003aea54384" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 584.495s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.216067] env[69027]: DEBUG oslo_concurrency.lockutils [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "1715faa2-86ea-49f9-a993-1003aea54384" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 388.277s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.216287] env[69027]: DEBUG oslo_concurrency.lockutils [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Acquiring lock "1715faa2-86ea-49f9-a993-1003aea54384-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.216497] env[69027]: DEBUG oslo_concurrency.lockutils [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "1715faa2-86ea-49f9-a993-1003aea54384-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.216764] env[69027]: DEBUG oslo_concurrency.lockutils [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "1715faa2-86ea-49f9-a993-1003aea54384-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.219969] env[69027]: INFO nova.compute.manager [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Terminating instance [ 1616.221774] env[69027]: DEBUG nova.compute.manager [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1616.221970] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1616.222240] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0b19e1c-ada6-4c4f-bf14-b40a803f2bb1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.233184] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece9dc7d-8288-4390-9fd4-7c2bea3303cf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.243441] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1616.264263] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1715faa2-86ea-49f9-a993-1003aea54384 could not be found. [ 1616.264522] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1616.264800] env[69027]: INFO nova.compute.manager [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1616.265011] env[69027]: DEBUG oslo.service.loopingcall [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.265296] env[69027]: DEBUG nova.compute.manager [-] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1616.265396] env[69027]: DEBUG nova.network.neutron [-] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1616.289525] env[69027]: DEBUG nova.network.neutron [-] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.295946] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.296208] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.299573] env[69027]: INFO nova.compute.claims [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1616.302392] env[69027]: INFO nova.compute.manager [-] [instance: 1715faa2-86ea-49f9-a993-1003aea54384] Took 0.04 seconds to deallocate network for instance. 
[ 1616.390500] env[69027]: DEBUG oslo_concurrency.lockutils [None req-19df4136-4e64-488d-8140-db59e64b1e27 tempest-AttachVolumeTestJSON-1571304634 tempest-AttachVolumeTestJSON-1571304634-project-member] Lock "1715faa2-86ea-49f9-a993-1003aea54384" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.491055] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcd2d4a-6fb9-47e2-bbcf-681c06ddf709 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.499278] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031c6b67-4536-4525-ba71-fafc6be9aa8d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.527929] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e516d96c-c714-41c9-a382-3c788f1a5e96 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.535411] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88316cfa-2ac7-4d10-8b59-c87c1c802ee4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.548303] env[69027]: DEBUG nova.compute.provider_tree [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1616.556958] env[69027]: DEBUG nova.scheduler.client.report [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1616.569797] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.273s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.570291] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1616.599610] env[69027]: DEBUG nova.compute.utils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1616.600990] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1616.601177] env[69027]: DEBUG nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1616.609615] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1616.659493] env[69027]: DEBUG nova.policy [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4c7f278f8857481786002608a110697d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c558fa59cdb4454957a8e7792365a47', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1616.671257] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1616.696643] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1616.696891] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1616.697061] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1616.697250] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1616.697395] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1616.697541] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1616.698145] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1616.698145] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1616.698145] 
env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1616.698287] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1616.698461] env[69027]: DEBUG nova.virt.hardware [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1616.699337] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bea9d86-11ef-4b85-9c42-0c13b1c48d9c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.707645] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c18623f-2886-43a6-b9a3-01064d754144 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.197203] env[69027]: DEBUG nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Successfully created port: 3421aa0d-f8ab-4a98-ae91-9fbb00dab987 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1618.016280] env[69027]: DEBUG nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Successfully updated port: 3421aa0d-f8ab-4a98-ae91-9fbb00dab987 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1618.027534] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "refresh_cache-1cbeaaaf-7915-47ee-be61-52f8e05403d9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.027891] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired lock "refresh_cache-1cbeaaaf-7915-47ee-be61-52f8e05403d9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.028129] env[69027]: DEBUG nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1618.073395] env[69027]: DEBUG 
nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1618.147441] env[69027]: DEBUG nova.compute.manager [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Received event network-vif-plugged-3421aa0d-f8ab-4a98-ae91-9fbb00dab987 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1618.147678] env[69027]: DEBUG oslo_concurrency.lockutils [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] Acquiring lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.147914] env[69027]: DEBUG oslo_concurrency.lockutils [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.148121] env[69027]: DEBUG oslo_concurrency.lockutils [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.148278] env[69027]: DEBUG nova.compute.manager [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] No waiting events found dispatching network-vif-plugged-3421aa0d-f8ab-4a98-ae91-9fbb00dab987 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1618.148442] env[69027]: WARNING nova.compute.manager [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Received unexpected event network-vif-plugged-3421aa0d-f8ab-4a98-ae91-9fbb00dab987 for instance with vm_state building and task_state spawning. [ 1618.148676] env[69027]: DEBUG nova.compute.manager [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Received event network-changed-3421aa0d-f8ab-4a98-ae91-9fbb00dab987 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1618.148848] env[69027]: DEBUG nova.compute.manager [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Refreshing instance network info cache due to event network-changed-3421aa0d-f8ab-4a98-ae91-9fbb00dab987. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1618.149108] env[69027]: DEBUG oslo_concurrency.lockutils [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] Acquiring lock "refresh_cache-1cbeaaaf-7915-47ee-be61-52f8e05403d9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.240737] env[69027]: DEBUG nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Updating instance_info_cache with network_info: [{"id": "3421aa0d-f8ab-4a98-ae91-9fbb00dab987", "address": "fa:16:3e:87:c2:d9", "network": {"id": "ef5d0002-4d0c-452c-8eaf-fcbb3db325fc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-613479526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c558fa59cdb4454957a8e7792365a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3421aa0d-f8", "ovs_interfaceid": "3421aa0d-f8ab-4a98-ae91-9fbb00dab987", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.253623] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Releasing lock "refresh_cache-1cbeaaaf-7915-47ee-be61-52f8e05403d9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.253933] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Instance network_info: |[{"id": "3421aa0d-f8ab-4a98-ae91-9fbb00dab987", "address": "fa:16:3e:87:c2:d9", "network": {"id": "ef5d0002-4d0c-452c-8eaf-fcbb3db325fc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-613479526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c558fa59cdb4454957a8e7792365a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 
864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3421aa0d-f8", "ovs_interfaceid": "3421aa0d-f8ab-4a98-ae91-9fbb00dab987", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1618.254241] env[69027]: DEBUG oslo_concurrency.lockutils [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] Acquired lock "refresh_cache-1cbeaaaf-7915-47ee-be61-52f8e05403d9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.254420] env[69027]: DEBUG nova.network.neutron [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Refreshing network info cache for port 3421aa0d-f8ab-4a98-ae91-9fbb00dab987 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1618.255416] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:c2:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3421aa0d-f8ab-4a98-ae91-9fbb00dab987', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1618.262870] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating folder: Project (4c558fa59cdb4454957a8e7792365a47). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1618.265573] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42a0d251-a526-4e08-87db-0692cc770322 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.276608] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Created folder: Project (4c558fa59cdb4454957a8e7792365a47) in parent group-v677321. [ 1618.276802] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating folder: Instances. Parent ref: group-v677412. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1618.277043] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6d39949-d1b2-4e06-9f19-22f93a151ec0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.286493] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Created folder: Instances in parent group-v677412. 
[ 1618.286799] env[69027]: DEBUG oslo.service.loopingcall [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1618.287030] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1618.287269] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4a47e28-799e-4bb9-a580-98c1315270a1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.307089] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1618.307089] env[69027]: value = "task-3395234" [ 1618.307089] env[69027]: _type = "Task" [ 1618.307089] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.314280] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395234, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.568480] env[69027]: DEBUG nova.network.neutron [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Updated VIF entry in instance network info cache for port 3421aa0d-f8ab-4a98-ae91-9fbb00dab987. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1618.569079] env[69027]: DEBUG nova.network.neutron [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Updating instance_info_cache with network_info: [{"id": "3421aa0d-f8ab-4a98-ae91-9fbb00dab987", "address": "fa:16:3e:87:c2:d9", "network": {"id": "ef5d0002-4d0c-452c-8eaf-fcbb3db325fc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-613479526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c558fa59cdb4454957a8e7792365a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3421aa0d-f8", "ovs_interfaceid": "3421aa0d-f8ab-4a98-ae91-9fbb00dab987", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.580616] env[69027]: DEBUG oslo_concurrency.lockutils [req-1be37fe2-a0c3-4364-addb-694ab69ae2b5 req-564d956c-9c85-4c07-bc87-25cfe557e9d4 service nova] Releasing lock "refresh_cache-1cbeaaaf-7915-47ee-be61-52f8e05403d9" {{(pid=69027) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.816950] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395234, 'name': CreateVM_Task, 'duration_secs': 0.269026} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.818025] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1618.818025] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.818190] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.818501] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1618.818785] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67d57e79-0dfd-41fb-b769-f08276608b8c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.823330] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 1618.823330] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]528ebe2c-4bfc-d74f-ff09-ddedc975f208" [ 1618.823330] env[69027]: _type = "Task" [ 1618.823330] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.831450] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]528ebe2c-4bfc-d74f-ff09-ddedc975f208, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.335421] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.335774] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1619.336122] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.771260] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1620.786058] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.786058] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.786058] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.786058] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1620.786058] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a17c61-775b-44e3-b8bb-119d539f6178 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.794471] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26f2269-1533-4ed4-ab1d-789d5727c3bf {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.808926] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb4af13-2aaf-4c68-b797-a04d1ac709f5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.815340] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6a6356-62f2-4bb6-9a11-1e859dbe006a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.844030] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180985MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1620.844030] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.844226] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.914355] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance edc3a0ff-c592-47b8-9753-1b4831bee576 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.914514] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.914641] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.914787] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.914926] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.915074] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.915202] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.915318] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.915431] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.915603] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1620.927272] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ed6243ed-cd06-46cd-b592-f05d5cd83139 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1620.937769] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1620.937915] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1620.938048] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1621.088921] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae5afac-ef57-40ae-876b-97f12d792c38 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.095971] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa818cc3-f8be-4121-b45f-e6053e910eb4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.125495] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2788039-4142-4f62-8c51-8d11d53a0fb9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.132368] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f78438-8bd1-4f63-b60f-61d3507577ff {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.144874] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.153554] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1621.167561] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1621.167836] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.324s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.194521] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.164132] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.164440] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1624.164553] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1624.164649] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1624.185869] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186085] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186154] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186275] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186398] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186521] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186654] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186744] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.186888] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.187083] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1624.187218] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1624.187738] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1625.771240] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1626.771499] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.768560] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.790574] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.790949] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.790949] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1629.772453] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1629.882956] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "01d7b088-73b4-4624-b013-2da51bf78767" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.882956] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "01d7b088-73b4-4624-b013-2da51bf78767" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.693181] env[69027]: DEBUG oslo_concurrency.lockutils [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "1d3442ae-f46f-433d-bccb-f323463e3a21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.426071] env[69027]: DEBUG oslo_concurrency.lockutils [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.341680] env[69027]: WARNING oslo_vmware.rw_handles [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1664.341680] 
env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1664.341680] env[69027]: ERROR oslo_vmware.rw_handles [ 1664.342331] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1664.344212] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1664.344463] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Copying Virtual Disk [datastore2] vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/1c66c050-0df6-46a9-a744-152f85dd8c1b/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1664.344762] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4591982a-cbb9-4c4e-a04b-eb801a900ae1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.352612] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Waiting for the task: (returnval){ [ 1664.352612] env[69027]: value = "task-3395235" [ 1664.352612] env[69027]: _type = "Task" [ 1664.352612] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.360212] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Task: {'id': task-3395235, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.862818] env[69027]: DEBUG oslo_vmware.exceptions [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1664.863126] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.863671] env[69027]: ERROR nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1664.863671] env[69027]: Faults: ['InvalidArgument'] [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Traceback (most recent call last): [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] yield resources [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self.driver.spawn(context, instance, image_meta, [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self._fetch_image_if_missing(context, vi) [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] image_cache(vi, tmp_image_ds_loc) [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] vm_util.copy_virtual_disk( [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] session._wait_for_task(vmdk_copy_task) [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] return self.wait_for_task(task_ref) [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] return evt.wait() [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] result = hub.switch() [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] return self.greenlet.switch() [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self.f(*self.args, **self.kw) [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] raise exceptions.translate_fault(task_info.error) [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Faults: ['InvalidArgument'] [ 1664.863671] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] [ 1664.864700] env[69027]: INFO nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Terminating instance [ 1664.865586] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.865793] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1664.866039] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63daaea9-432f-441d-a3f7-1f7d2656ad39 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.869361] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1664.869583] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1664.870313] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79decfe-3ccb-4bdb-b322-cab330c7d73b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.877015] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1664.877271] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad00e7c8-3c66-43a0-8739-d15f2e48b3ae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.879503] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1664.879684] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1664.880675] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afef6bd3-0fb2-4263-938e-34c1fb33e454 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.885490] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 1664.885490] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52fadbbf-e503-b6af-3d45-65bb66b31b18" [ 1664.885490] env[69027]: _type = "Task" [ 1664.885490] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.892277] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52fadbbf-e503-b6af-3d45-65bb66b31b18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.943053] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1664.943293] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1664.943475] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Deleting the datastore file [datastore2] edc3a0ff-c592-47b8-9753-1b4831bee576 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1664.943738] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81ac3542-5edc-4872-8b7c-f4aa1166949c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.950912] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Waiting for the task: (returnval){ [ 1664.950912] env[69027]: value = "task-3395237" [ 1664.950912] env[69027]: _type = "Task" [ 1664.950912] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.960793] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Task: {'id': task-3395237, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.396496] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1665.396817] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating directory with path [datastore2] vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1665.396975] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc9c1782-06a7-4a94-aeac-f71e9a5e1d89 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.407827] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created directory with path [datastore2] vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1665.408046] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Fetch image to [datastore2] vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1665.408227] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1665.408922] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030ac156-177b-435b-975f-b63e6048b354 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.415468] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399a192f-43b1-4236-ba58-88fe7c832185 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.424257] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f25f869-63d2-4923-87b7-9d124dc6b89b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.459679] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4170320c-3ac4-4f49-9cb3-465a4b39abb8 {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.466947] env[69027]: DEBUG oslo_vmware.api [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Task: {'id': task-3395237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079757} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.468461] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1665.468652] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1665.468827] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1665.469039] env[69027]: INFO nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1665.470842] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5fbf9280-f829-4940-ba00-efd00a4b7ca1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.472740] env[69027]: DEBUG nova.compute.claims [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1665.472916] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.473140] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.496018] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1665.549898] env[69027]: DEBUG oslo_vmware.rw_handles [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1665.612462] env[69027]: DEBUG oslo_vmware.rw_handles [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1665.612656] env[69027]: DEBUG oslo_vmware.rw_handles [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1665.718942] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5929d1eb-ebeb-4f79-aeca-f6dfa0d4ce05 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.727264] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8894c79d-ddf7-4148-9db0-c2d50e6dfebe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.758484] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cb44d1-73b0-4e6f-b495-3f3e96349260 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.765720] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b710fab5-d1ad-47c5-8a28-8032c43fc6a7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.778774] env[69027]: DEBUG nova.compute.provider_tree [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1665.787730] env[69027]: DEBUG nova.scheduler.client.report [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1665.803068] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.803644] env[69027]: ERROR nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1665.803644] env[69027]: Faults: ['InvalidArgument'] [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Traceback (most recent call last): [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/compute/manager.py", line 
2632, in _build_and_run_instance [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self.driver.spawn(context, instance, image_meta, [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self._fetch_image_if_missing(context, vi) [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] image_cache(vi, tmp_image_ds_loc) [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] vm_util.copy_virtual_disk( [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] session._wait_for_task(vmdk_copy_task) [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] return self.wait_for_task(task_ref) [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] return evt.wait() [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] result = hub.switch() [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] return self.greenlet.switch() [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] self.f(*self.args, **self.kw) [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: 
edc3a0ff-c592-47b8-9753-1b4831bee576] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] raise exceptions.translate_fault(task_info.error) [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Faults: ['InvalidArgument'] [ 1665.803644] env[69027]: ERROR nova.compute.manager [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] [ 1665.804478] env[69027]: DEBUG nova.compute.utils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1665.806681] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Build of instance edc3a0ff-c592-47b8-9753-1b4831bee576 was re-scheduled: A specified parameter was not correct: fileType [ 1665.806681] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1665.807189] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1665.807437] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1665.807788] env[69027]: DEBUG nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1665.808117] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1666.406637] env[69027]: DEBUG nova.network.neutron [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.423852] env[69027]: INFO nova.compute.manager [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Took 0.62 seconds to deallocate network for instance. [ 1666.535969] env[69027]: INFO nova.scheduler.client.report [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Deleted allocations for instance edc3a0ff-c592-47b8-9753-1b4831bee576 [ 1666.558903] env[69027]: DEBUG oslo_concurrency.lockutils [None req-620c255a-49dd-4498-b2a1-67c8d8a53404 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "edc3a0ff-c592-47b8-9753-1b4831bee576" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 586.382s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.560055] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "edc3a0ff-c592-47b8-9753-1b4831bee576" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.294s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.560296] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Acquiring lock "edc3a0ff-c592-47b8-9753-1b4831bee576-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.563669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] 
Lock "edc3a0ff-c592-47b8-9753-1b4831bee576-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.563669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "edc3a0ff-c592-47b8-9753-1b4831bee576-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.563669] env[69027]: INFO nova.compute.manager [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Terminating instance [ 1666.564532] env[69027]: DEBUG nova.compute.manager [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1666.564715] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1666.565184] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a0298f7-01f2-4885-b83c-e286ae463a8c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.575870] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-141fb77f-26fc-4de4-8418-921a1abaed0d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.587625] env[69027]: DEBUG nova.compute.manager [None req-f3833cdd-e788-4314-95d5-5e5b269b16ca tempest-ServersNegativeTestJSON-332394447 tempest-ServersNegativeTestJSON-332394447-project-member] [instance: ed6243ed-cd06-46cd-b592-f05d5cd83139] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1666.608827] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance edc3a0ff-c592-47b8-9753-1b4831bee576 could not be found. 
[ 1666.609048] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1666.609229] env[69027]: INFO nova.compute.manager [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1666.609482] env[69027]: DEBUG oslo.service.loopingcall [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1666.609809] env[69027]: DEBUG nova.compute.manager [-] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1666.609809] env[69027]: DEBUG nova.network.neutron [-] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1666.612691] env[69027]: DEBUG nova.compute.manager [None req-f3833cdd-e788-4314-95d5-5e5b269b16ca tempest-ServersNegativeTestJSON-332394447 tempest-ServersNegativeTestJSON-332394447-project-member] [instance: ed6243ed-cd06-46cd-b592-f05d5cd83139] Instance disappeared before build. {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1666.647444] env[69027]: DEBUG nova.network.neutron [-] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.655385] env[69027]: DEBUG oslo_concurrency.lockutils [None req-f3833cdd-e788-4314-95d5-5e5b269b16ca tempest-ServersNegativeTestJSON-332394447 tempest-ServersNegativeTestJSON-332394447-project-member] Lock "ed6243ed-cd06-46cd-b592-f05d5cd83139" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.356s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.657084] env[69027]: INFO nova.compute.manager [-] [instance: edc3a0ff-c592-47b8-9753-1b4831bee576] Took 0.05 seconds to deallocate network for instance. [ 1666.665452] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1666.735359] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.738018] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.738298] env[69027]: INFO nova.compute.claims [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1666.750902] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6cf5b896-0cb7-4232-8487-7b0e6b3802b0 tempest-ServerMetadataNegativeTestJSON-1882516831 tempest-ServerMetadataNegativeTestJSON-1882516831-project-member] Lock "edc3a0ff-c592-47b8-9753-1b4831bee576" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.960327] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53af88b-88a1-441e-a60a-609a7e903571 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.968829] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff89ba45-c084-4ef1-a73a-80b9c1c21214 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.005557] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd01325-1cf0-45eb-8ce8-787a186487e4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.014421] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6308f784-362b-4800-b2b4-02fff9b4719e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.028194] env[69027]: DEBUG nova.compute.provider_tree [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.042889] env[69027]: DEBUG nova.scheduler.client.report [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1667.059152] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.323s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.059665] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1667.097567] env[69027]: DEBUG nova.compute.utils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1667.098892] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1667.099176] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1667.109815] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1667.188569] env[69027]: DEBUG nova.policy [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9cae5394177466e9afb1f8fa26e15ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ed36a72c2994c47a7313f7bbb37640a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1667.199756] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1667.230845] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1667.231329] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1667.231623] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1667.231927] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1667.232218] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1667.236019] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1667.236019] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1667.236019] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1667.236019] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 
tempest-ServersTestJSON-2138845674-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1667.236019] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1667.236019] env[69027]: DEBUG nova.virt.hardware [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1667.236019] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7212c48-a63b-46cc-aadf-fefad1fb093b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.244395] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef8d38c-cfd6-46bf-b3e2-b8c848e4f5e6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.517146] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Successfully created port: 01ab411e-2aa1-4c84-abeb-b025627ee2b7 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1668.286783] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Successfully updated port: 01ab411e-2aa1-4c84-abeb-b025627ee2b7 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1668.298304] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "refresh_cache-3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.298443] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "refresh_cache-3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.298590] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1668.356819] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1668.447529] env[69027]: DEBUG nova.compute.manager [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Received event network-vif-plugged-01ab411e-2aa1-4c84-abeb-b025627ee2b7 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1668.447739] env[69027]: DEBUG oslo_concurrency.lockutils [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] Acquiring lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.447945] env[69027]: DEBUG oslo_concurrency.lockutils [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.448153] env[69027]: DEBUG oslo_concurrency.lockutils [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.448331] env[69027]: DEBUG nova.compute.manager [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] No waiting events found dispatching network-vif-plugged-01ab411e-2aa1-4c84-abeb-b025627ee2b7 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1668.448498] env[69027]: WARNING nova.compute.manager [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Received unexpected event network-vif-plugged-01ab411e-2aa1-4c84-abeb-b025627ee2b7 for instance with vm_state building and task_state spawning. [ 1668.448660] env[69027]: DEBUG nova.compute.manager [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Received event network-changed-01ab411e-2aa1-4c84-abeb-b025627ee2b7 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1668.448864] env[69027]: DEBUG nova.compute.manager [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Refreshing instance network info cache due to event network-changed-01ab411e-2aa1-4c84-abeb-b025627ee2b7. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1668.449070] env[69027]: DEBUG oslo_concurrency.lockutils [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] Acquiring lock "refresh_cache-3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.523647] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Updating instance_info_cache with network_info: [{"id": "01ab411e-2aa1-4c84-abeb-b025627ee2b7", "address": "fa:16:3e:31:70:44", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01ab411e-2a", "ovs_interfaceid": "01ab411e-2aa1-4c84-abeb-b025627ee2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.536085] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "refresh_cache-3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.536392] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Instance network_info: |[{"id": "01ab411e-2aa1-4c84-abeb-b025627ee2b7", "address": "fa:16:3e:31:70:44", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01ab411e-2a", 
"ovs_interfaceid": "01ab411e-2aa1-4c84-abeb-b025627ee2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1668.536688] env[69027]: DEBUG oslo_concurrency.lockutils [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] Acquired lock "refresh_cache-3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.536870] env[69027]: DEBUG nova.network.neutron [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Refreshing network info cache for port 01ab411e-2aa1-4c84-abeb-b025627ee2b7 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1668.537942] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:70:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01ab411e-2aa1-4c84-abeb-b025627ee2b7', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1668.545281] env[69027]: DEBUG oslo.service.loopingcall [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.546079] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1668.548348] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bdf07f35-3281-4eee-b277-292efbb92be9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.568151] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1668.568151] env[69027]: value = "task-3395238" [ 1668.568151] env[69027]: _type = "Task" [ 1668.568151] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.575792] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395238, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.848922] env[69027]: DEBUG nova.network.neutron [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Updated VIF entry in instance network info cache for port 01ab411e-2aa1-4c84-abeb-b025627ee2b7. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1668.849347] env[69027]: DEBUG nova.network.neutron [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Updating instance_info_cache with network_info: [{"id": "01ab411e-2aa1-4c84-abeb-b025627ee2b7", "address": "fa:16:3e:31:70:44", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01ab411e-2a", "ovs_interfaceid": "01ab411e-2aa1-4c84-abeb-b025627ee2b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.858740] env[69027]: DEBUG oslo_concurrency.lockutils [req-7d971a86-2efa-456a-b4e9-c78bad5500b3 req-405bd524-10ed-407a-a96d-c2e7f0a3568f service nova] Releasing lock "refresh_cache-3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.079147] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395238, 'name': CreateVM_Task, 'duration_secs': 0.283639} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.079314] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1669.079949] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.080158] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.080533] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1669.080783] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12cdcf39-37bc-4370-86e8-7143a3b50479 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.084969] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 1669.084969] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5266dacb-fb3b-025a-421c-6a00aac5f08a" [ 1669.084969] env[69027]: _type = "Task" [ 1669.084969] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.092280] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5266dacb-fb3b-025a-421c-6a00aac5f08a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.594838] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.595143] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1669.595311] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.772145] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1682.783856] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.784089] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.784259] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.784419] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1682.785552] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e89474e-1ac5-47c6-b7c2-f4930b68db31 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.794346] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d611a314-0ab9-46bf-a384-9a9f76ba3ae3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.807830] env[69027]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2daee6b1-01c0-4738-805b-f449de29410e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.814027] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064c98d6-9ffc-4a89-94a9-71731663db48 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.844044] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180993MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1682.844208] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.844406] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.916009] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.916273] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.916355] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.916431] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.916547] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.916690] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.916839] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.916958] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.917084] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.917202] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1682.928471] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1682.928649] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1682.928798] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1683.062443] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db34b91c-5551-4e65-9eb4-e83c656e5329 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.069846] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94bac85d-21fe-4756-a7ed-bdea12020974 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.099069] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe13436-1ffe-42d1-9e8f-d1dd09d42873 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.105602] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc2f878-ac58-40fc-b5f3-90f0a78359f8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.117933] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.126288] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1683.140711] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1683.140890] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.296s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.140948] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1684.141245] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1684.141317] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1684.160960] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161127] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161263] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161392] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161523] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161644] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161762] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161878] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.161994] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.162123] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1684.162241] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1684.771168] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1685.767673] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1687.771101] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1687.771462] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.773243] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.773243] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.773243] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1689.773243] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1714.360069] env[69027]: WARNING oslo_vmware.rw_handles [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1714.360069] env[69027]: ERROR oslo_vmware.rw_handles [ 1714.360753] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1714.362589] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1714.362870] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Copying Virtual Disk [datastore2] vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/91ad5c12-e680-4b1c-9a05-aec8b3a82ee4/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1714.363184] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe131b05-417f-4fc6-b3f1-c7cfdb351bfd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.372805] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 1714.372805] env[69027]: value = "task-3395239" [ 
1714.372805] env[69027]: _type = "Task" [ 1714.372805] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.380366] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': task-3395239, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.883258] env[69027]: DEBUG oslo_vmware.exceptions [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1714.883487] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.884052] env[69027]: ERROR nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1714.884052] env[69027]: Faults: ['InvalidArgument'] [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Traceback (most recent call last): [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] yield resources [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self.driver.spawn(context, instance, image_meta, [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self._fetch_image_if_missing(context, vi) [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] image_cache(vi, tmp_image_ds_loc) [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 
1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] vm_util.copy_virtual_disk( [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] session._wait_for_task(vmdk_copy_task) [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] return self.wait_for_task(task_ref) [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] return evt.wait() [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] result = hub.switch() [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] return self.greenlet.switch() [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self.f(*self.args, **self.kw) [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] raise exceptions.translate_fault(task_info.error) [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Faults: ['InvalidArgument'] [ 1714.884052] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] [ 1714.885386] env[69027]: INFO nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Terminating instance [ 1714.885943] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.886164] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1714.886402] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b738f39d-7b6f-453a-a979-1dfb51792c8d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.888713] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1714.888907] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1714.889660] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99548a4-2dda-42e6-87f8-de9897248d08 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.896674] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1714.896881] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-620ca54b-d4ab-4c32-ad28-cf0ea1059703 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.898960] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1714.899160] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1714.900109] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36cd3a83-4035-494c-9894-08dd88e719a8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.904824] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1714.904824] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]526799e2-2853-02c3-d9b5-ee589ca6bb86" [ 1714.904824] env[69027]: _type = "Task" [ 1714.904824] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.911601] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]526799e2-2853-02c3-d9b5-ee589ca6bb86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.972543] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1714.972766] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1714.972944] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Deleting the datastore file [datastore2] 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1714.973286] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6efdcead-892d-48ce-ab29-45900d6d9877 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.979729] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 1714.979729] env[69027]: value = "task-3395241" [ 1714.979729] env[69027]: _type = "Task" [ 1714.979729] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.987456] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': task-3395241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.415629] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1715.415928] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating directory with path [datastore2] vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1715.416154] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-801686da-b565-40a6-a8cf-46e77d58a162 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.427268] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Created directory with path [datastore2] vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.427453] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Fetch image to [datastore2] vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1715.427657] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1715.428366] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844d6a4c-1517-4a61-915a-03865e37be24 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.434988] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd851280-5c23-4097-bbc5-f2aac570011e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.443662] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50706d66-154c-4ce2-b7b5-157484202ced {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.474814] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f030a59c-5aeb-4d36-b7ee-5b3b3e376a04 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.479958] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cc87d012-0e57-4be0-b4eb-a1e542306e53 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.488667] env[69027]: DEBUG oslo_vmware.api [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': task-3395241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067809} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.488896] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1715.489076] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1715.489246] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1715.489421] env[69027]: INFO nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1715.491419] env[69027]: DEBUG nova.compute.claims [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1715.491593] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.491805] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.505476] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1715.556245] env[69027]: DEBUG oslo_vmware.rw_handles [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1715.616958] env[69027]: DEBUG oslo_vmware.rw_handles [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1715.617173] env[69027]: DEBUG oslo_vmware.rw_handles [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1715.713645] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8c80cb-4984-4a24-aed1-0cc2b995204d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.720703] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d16d07-bb43-4f0d-a562-30579dc46182 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.749771] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33de421a-12a3-4a1c-8a7b-0200758d8c29 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.756235] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49e90ca-045e-46be-99cf-4cfd1e33bbc6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.768606] env[69027]: DEBUG nova.compute.provider_tree [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.778287] env[69027]: DEBUG nova.scheduler.client.report [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1715.793347] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.301s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.793875] env[69027]: ERROR nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1715.793875] env[69027]: Faults: ['InvalidArgument'] [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Traceback (most recent call last): [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 
1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self.driver.spawn(context, instance, image_meta, [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self._fetch_image_if_missing(context, vi) [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] image_cache(vi, tmp_image_ds_loc) [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] vm_util.copy_virtual_disk( [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] session._wait_for_task(vmdk_copy_task) [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] return self.wait_for_task(task_ref) [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] return evt.wait() [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] result = hub.switch() [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] return self.greenlet.switch() [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] self.f(*self.args, **self.kw) [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] raise exceptions.translate_fault(task_info.error) [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Faults: ['InvalidArgument'] [ 1715.793875] env[69027]: ERROR nova.compute.manager [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] [ 1715.794873] env[69027]: DEBUG nova.compute.utils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1715.795864] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Build of instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e was re-scheduled: A specified parameter was not correct: fileType [ 1715.795864] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1715.796251] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1715.796428] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1715.796602] env[69027]: DEBUG nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1715.796771] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1716.039806] env[69027]: DEBUG nova.network.neutron [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.054217] env[69027]: INFO nova.compute.manager [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Took 0.26 seconds to deallocate network for instance. [ 1716.140538] env[69027]: INFO nova.scheduler.client.report [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Deleted allocations for instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e [ 1716.161792] env[69027]: DEBUG oslo_concurrency.lockutils [None req-6e1d8559-c70d-4d1b-a0f5-272eba0e2394 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.802s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.163347] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.966s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.163587] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.163799] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1716.164057] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.166250] env[69027]: INFO nova.compute.manager [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Terminating instance [ 1716.168021] env[69027]: DEBUG nova.compute.manager [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1716.168197] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1716.168718] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f372998-aef0-4369-a0d3-a6a43afd0c37 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.180524] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bde4119-014f-4044-9feb-67b953d71c18 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.193403] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1716.215683] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e could not be found. [ 1716.215957] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1716.216192] env[69027]: INFO nova.compute.manager [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1716.216484] env[69027]: DEBUG oslo.service.loopingcall [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.216798] env[69027]: DEBUG nova.compute.manager [-] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1716.216856] env[69027]: DEBUG nova.network.neutron [-] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1716.242508] env[69027]: DEBUG nova.network.neutron [-] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.250695] env[69027]: INFO nova.compute.manager [-] [instance: 1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e] Took 0.03 seconds to deallocate network for instance. [ 1716.255644] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.255914] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.257265] env[69027]: INFO nova.compute.claims [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1716.335310] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c706f9b7-dc45-4501-b9fc-f14524705101 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "1aa2c1cb-0b09-46d5-bd7b-19b66a36ee8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.427506] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a699100-6018-43ed-bb67-d267d7fece3b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.435297] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804d25a3-61a2-4a03-b042-70959cb898c6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.465731] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82afff9-4b09-4294-8f5c-2e72374aac89 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.472835] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0908b7f0-5f1b-400d-befb-65b0fe2cd54d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.485827] env[69027]: DEBUG nova.compute.provider_tree [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.496204] env[69027]: DEBUG nova.scheduler.client.report [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1716.509352] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.253s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.509817] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1716.544907] env[69027]: DEBUG nova.compute.utils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1716.546390] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1716.546647] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1716.554911] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Start building block device mappings for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1716.602360] env[69027]: DEBUG nova.policy [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79047db126994a79b8ffe3af1c7c43c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '459c3e2c96a047ddac468e91fbe438b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1716.614349] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Start spawning the instance on the hypervisor. {{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1716.638406] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1716.638713] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1716.638876] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1716.639070] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1716.639223] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1716.639370] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 
tempest-ImagesTestJSON-216246989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1716.639733] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1716.639940] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1716.640130] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1716.640295] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1716.640469] env[69027]: DEBUG nova.virt.hardware [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1716.641312] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ce69e8-3e64-426f-8dd5-7d23ce5058a1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.649032] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a792d6b9-7ce8-4b15-8b18-871089b243e7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.063201] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Successfully created port: 008fd459-b5df-4549-9ff6-f637d73c1ec1 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1717.832414] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Successfully updated port: 008fd459-b5df-4549-9ff6-f637d73c1ec1 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1717.847041] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "refresh_cache-01d7b088-73b4-4624-b013-2da51bf78767" {{(pid=69027) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.847194] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired lock "refresh_cache-01d7b088-73b4-4624-b013-2da51bf78767" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.847351] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1717.900110] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.911420] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1718.077839] env[69027]: DEBUG nova.compute.manager [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Received event network-vif-plugged-008fd459-b5df-4549-9ff6-f637d73c1ec1 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1718.078060] env[69027]: DEBUG oslo_concurrency.lockutils [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] Acquiring lock "01d7b088-73b4-4624-b013-2da51bf78767-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.078279] env[69027]: DEBUG oslo_concurrency.lockutils [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] Lock "01d7b088-73b4-4624-b013-2da51bf78767-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.078467] env[69027]: DEBUG oslo_concurrency.lockutils [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] Lock "01d7b088-73b4-4624-b013-2da51bf78767-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.078672] env[69027]: DEBUG nova.compute.manager [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] No waiting events found dispatching network-vif-plugged-008fd459-b5df-4549-9ff6-f637d73c1ec1 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1718.078839] env[69027]: WARNING nova.compute.manager [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Received unexpected event network-vif-plugged-008fd459-b5df-4549-9ff6-f637d73c1ec1 for instance with vm_state building and task_state spawning. [ 1718.079073] env[69027]: DEBUG nova.compute.manager [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Received event network-changed-008fd459-b5df-4549-9ff6-f637d73c1ec1 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1718.079274] env[69027]: DEBUG nova.compute.manager [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Refreshing instance network info cache due to event network-changed-008fd459-b5df-4549-9ff6-f637d73c1ec1. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1718.079449] env[69027]: DEBUG oslo_concurrency.lockutils [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] Acquiring lock "refresh_cache-01d7b088-73b4-4624-b013-2da51bf78767" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.128781] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Updating instance_info_cache with network_info: [{"id": "008fd459-b5df-4549-9ff6-f637d73c1ec1", "address": "fa:16:3e:9f:e4:90", "network": {"id": "b30e6669-7c3a-4191-8eeb-47271f0397fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2133150353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "459c3e2c96a047ddac468e91fbe438b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008fd459-b5", "ovs_interfaceid": "008fd459-b5df-4549-9ff6-f637d73c1ec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.140368] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Releasing lock "refresh_cache-01d7b088-73b4-4624-b013-2da51bf78767" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.140617] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 
01d7b088-73b4-4624-b013-2da51bf78767] Instance network_info: |[{"id": "008fd459-b5df-4549-9ff6-f637d73c1ec1", "address": "fa:16:3e:9f:e4:90", "network": {"id": "b30e6669-7c3a-4191-8eeb-47271f0397fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2133150353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "459c3e2c96a047ddac468e91fbe438b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008fd459-b5", "ovs_interfaceid": "008fd459-b5df-4549-9ff6-f637d73c1ec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1718.140925] env[69027]: DEBUG oslo_concurrency.lockutils [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] Acquired lock "refresh_cache-01d7b088-73b4-4624-b013-2da51bf78767" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.141128] env[69027]: DEBUG nova.network.neutron [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Refreshing network info cache for port 008fd459-b5df-4549-9ff6-f637d73c1ec1 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1718.142128] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:e4:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '664c466b-9417-49d7-83cc-364d964c403a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '008fd459-b5df-4549-9ff6-f637d73c1ec1', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1718.149621] env[69027]: DEBUG oslo.service.loopingcall [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1718.152543] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1718.153016] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70dd74e4-caa6-447c-bdee-e597a885e9f5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.179989] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1718.179989] env[69027]: value = "task-3395242" [ 1718.179989] env[69027]: _type = "Task" [ 1718.179989] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.191075] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395242, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.440012] env[69027]: DEBUG nova.network.neutron [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Updated VIF entry in instance network info cache for port 008fd459-b5df-4549-9ff6-f637d73c1ec1. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1718.440429] env[69027]: DEBUG nova.network.neutron [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Updating instance_info_cache with network_info: [{"id": "008fd459-b5df-4549-9ff6-f637d73c1ec1", "address": "fa:16:3e:9f:e4:90", "network": {"id": "b30e6669-7c3a-4191-8eeb-47271f0397fe", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2133150353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "459c3e2c96a047ddac468e91fbe438b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008fd459-b5", "ovs_interfaceid": "008fd459-b5df-4549-9ff6-f637d73c1ec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.450815] env[69027]: DEBUG oslo_concurrency.lockutils [req-3260b5e4-9520-4a56-a497-5dcaa55a2537 req-aabde4db-3d93-4f66-b7c3-2ccf93015a06 service nova] Releasing lock "refresh_cache-01d7b088-73b4-4624-b013-2da51bf78767" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.690107] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395242, 'name': CreateVM_Task, 'duration_secs': 0.296586} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.690277] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1718.690955] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.691149] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.691469] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1718.691717] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4089e96-088a-4b8e-b61e-e2a3df2c6a47 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.696118] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 1718.696118] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52b7c35b-24ad-6554-3774-c767442b4e66" [ 1718.696118] env[69027]: _type = "Task" [ 1718.696118] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.703410] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52b7c35b-24ad-6554-3774-c767442b4e66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.207101] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.207436] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1719.207436] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.771448] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.771848] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1744.771848] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1744.793861] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.793993] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794107] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794225] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794348] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794467] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794586] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794704] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794821] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.794935] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1744.795062] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1744.795535] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1744.806583] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.806792] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.806958] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.807121] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1744.808177] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9021a31c-6bb3-4522-8a37-ad0a1c27a424 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.816941] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eadda714-0361-452d-9318-43740edc7663 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.831632] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8adb03-5e88-44c3-908f-34aa7547239d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.837495] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ef5ff3-6482-485c-8de9-24e2ede3fb9d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.865196] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180990MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1744.865388] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1744.865535] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.000845] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001027] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001182] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001378] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001513] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001636] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001753] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001872] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.001985] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.002112] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1745.002319] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1745.002455] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1745.115861] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8d590c-544c-4a31-afa6-6bbba80c38fe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.123473] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d792c4-a6cc-46e3-9d36-d922c39550fc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.154287] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bb8fcb-8774-483e-81cf-cae3f2ee5387 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.161356] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf55118-68c4-4fd5-ab1c-b8e2068850bf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.174730] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.183569] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1745.197389] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1745.197492] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.332s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.426296] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1745.446575] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Getting list of instances from cluster (obj){ [ 1745.446575] env[69027]: value = "domain-c8" [ 1745.446575] env[69027]: _type = "ClusterComputeResource" [ 1745.446575] env[69027]: } {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1745.447873] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63728639-a1a6-4da6-818d-7287fbd0a7c5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.464572] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Got total of 10 instances {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1745.464736] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid f981fe25-52bd-46e7-920e-1f73ca37d9a3 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.464931] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 39ee164e-5c7c-44cf-9767-cef1b8560bfb {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.465115] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 03d9d361-da15-4fb7-acfb-049098183bc3 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.465276] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid b930e792-b0a8-45e4-9330-befac22182b7 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.465429] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.465580] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.465729] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 
None None] Triggering sync for uuid 1d3442ae-f46f-433d-bccb-f323463e3a21 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.465891] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 1cbeaaaf-7915-47ee-be61-52f8e05403d9 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.466052] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.466203] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 01d7b088-73b4-4624-b013-2da51bf78767 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1745.466518] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.466775] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.466985] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "03d9d361-da15-4fb7-acfb-049098183bc3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.467200] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "b930e792-b0a8-45e4-9330-befac22182b7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.467396] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.467590] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.467782] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "1d3442ae-f46f-433d-bccb-f323463e3a21" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.467978] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.468193] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.468384] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "01d7b088-73b4-4624-b013-2da51bf78767" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.808821] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1746.809246] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1748.771562] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1749.767141] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1749.790722] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.772043] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1750.772216] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1751.771582] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1751.771887] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1753.771457] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1754.778236] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1754.778561] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances with incomplete migration {{(pid=69027) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1762.620975] env[69027]: WARNING oslo_vmware.rw_handles [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1762.620975] env[69027]: ERROR oslo_vmware.rw_handles [ 1762.621629] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 
{{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1762.623640] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1762.623946] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Copying Virtual Disk [datastore2] vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/e37f5109-76f7-4d22-8940-ecf188fe58dc/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1762.624308] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-443e1a2c-b4a9-4936-bf98-2bb7a9aeaf35 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.632322] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1762.632322] env[69027]: value = "task-3395243" [ 1762.632322] env[69027]: _type = "Task" [ 1762.632322] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.640302] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': task-3395243, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.144230] env[69027]: DEBUG oslo_vmware.exceptions [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1763.144579] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.145189] env[69027]: ERROR nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1763.145189] env[69027]: Faults: ['InvalidArgument'] [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Traceback (most recent call last): [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] yield resources [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self.driver.spawn(context, instance, image_meta, [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self._fetch_image_if_missing(context, vi) [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] image_cache(vi, tmp_image_ds_loc) [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] vm_util.copy_virtual_disk( [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] session._wait_for_task(vmdk_copy_task) [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] return self.wait_for_task(task_ref) [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] return evt.wait() [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] result = hub.switch() [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] return self.greenlet.switch() [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self.f(*self.args, **self.kw) [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] raise exceptions.translate_fault(task_info.error) [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Faults: ['InvalidArgument'] [ 1763.145189] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] [ 1763.146257] env[69027]: INFO nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Terminating instance [ 1763.147180] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.147391] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1763.147632] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-be5891c1-14a3-4884-82c7-23fca9036aa6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.149817] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1763.150162] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1763.150805] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1870d21-ba8a-4e99-95e3-5f04f632e246 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.157893] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1763.158973] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b55306f0-34c2-4aed-b206-8385296f91bf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.160368] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1763.160546] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1763.161243] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67ac71ce-4431-4dd2-b4c9-208fcedef5a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.166039] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1763.166039] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]520e0f3d-0092-2109-30d1-c7a74b9f6145" [ 1763.166039] env[69027]: _type = "Task" [ 1763.166039] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.173285] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]520e0f3d-0092-2109-30d1-c7a74b9f6145, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.226414] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1763.226599] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1763.226775] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Deleting the datastore file [datastore2] f981fe25-52bd-46e7-920e-1f73ca37d9a3 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1763.227056] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9187e04-3915-44c8-b165-e3d2316bbed2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.232990] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1763.232990] env[69027]: value = "task-3395245" [ 1763.232990] env[69027]: _type = "Task" [ 1763.232990] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.240379] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': task-3395245, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.675913] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1763.676276] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating directory with path [datastore2] vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1763.676440] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dc7dba8-4273-4f4c-800d-f535f8c0a0c1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.687018] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Created directory with path [datastore2] vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1763.687224] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Fetch image to [datastore2] vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1763.687394] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1763.688115] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56350ec7-6d65-42cc-846f-68abed93541d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.694381] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c0ce00-eb0b-4200-84f6-154fd0c5ea70 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.703119] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8127c3a8-4500-4f7d-aa9f-394574c1b389 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.736887] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484c97a6-ffd3-4eeb-9c6c-4d262ab66211 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.743318] env[69027]: DEBUG oslo_vmware.api [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': task-3395245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073154} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.745453] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1763.745647] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1763.745822] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1763.745995] env[69027]: INFO nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Took 0.60 seconds to destroy the instance on the hypervisor. 
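The spawn failure recorded above is produced by oslo.vmware's task polling: CopyVirtualDisk_Task ends in an error state, wait_for_task translates the task error, and because the InvalidArgument fault has no dedicated exception class it surfaces as VimFaultException carrying the "fileType" message, which _build_and_run_instance then turns into a reschedule. A minimal sketch of that calling pattern, assuming an already-created oslo_vmware.api.VMwareAPISession; the function and variable names below are illustrative, not nova's:

# Sketch only: wait on a vCenter task and inspect the translated fault.
from oslo_vmware import exceptions as vexc

def copy_and_wait(session, vmdk_copy_task):
    # session: an existing oslo_vmware.api.VMwareAPISession (assumed).
    # vmdk_copy_task: the task reference returned by CopyVirtualDisk_Task.
    try:
        # Polls the task and raises the translated fault if it ends in error.
        return session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as excep:
        # Fault names reported by vCenter are kept on the exception, e.g.
        # ['InvalidArgument'] with "A specified parameter was not correct: fileType".
        print('disk copy failed, faults=%s: %s' % (excep.fault_list, excep))
        raise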
[ 1763.747728] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6e873ea1-51b4-48f8-a0d8-9675518add44 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.749580] env[69027]: DEBUG nova.compute.claims [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1763.749758] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.749975] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.774054] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1763.826120] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1763.887887] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1763.888131] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1763.968353] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44b28a5-b433-48dd-bc31-c4c1c7188004 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.976215] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f86930-4d2f-405f-a876-1647ed5d6555 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.006734] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abaf9295-da80-4c01-b674-89f3cce89850 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.013960] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5228bae-a7fc-48aa-b97d-625cd0f11eb3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.026725] env[69027]: DEBUG nova.compute.provider_tree [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1764.035303] env[69027]: DEBUG nova.scheduler.client.report [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1764.049960] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.300s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.050517] env[69027]: ERROR nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1764.050517] env[69027]: Faults: ['InvalidArgument'] [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Traceback (most recent call last): [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self.driver.spawn(context, instance, image_meta, [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self._fetch_image_if_missing(context, vi) [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] image_cache(vi, tmp_image_ds_loc) [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] vm_util.copy_virtual_disk( [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] session._wait_for_task(vmdk_copy_task) [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] return self.wait_for_task(task_ref) [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] return evt.wait() [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] result = hub.switch() [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] return self.greenlet.switch() [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] self.f(*self.args, **self.kw) [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: 
f981fe25-52bd-46e7-920e-1f73ca37d9a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] raise exceptions.translate_fault(task_info.error) [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Faults: ['InvalidArgument'] [ 1764.050517] env[69027]: ERROR nova.compute.manager [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] [ 1764.051432] env[69027]: DEBUG nova.compute.utils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1764.052670] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Build of instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 was re-scheduled: A specified parameter was not correct: fileType [ 1764.052670] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1764.053052] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1764.053234] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1764.053418] env[69027]: DEBUG nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1764.053580] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1764.421331] env[69027]: DEBUG nova.network.neutron [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.434069] env[69027]: INFO nova.compute.manager [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Took 0.38 seconds to deallocate network for instance. [ 1764.521838] env[69027]: INFO nova.scheduler.client.report [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Deleted allocations for instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 [ 1764.543173] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d380a3ed-d999-4fca-91c1-0b0e5131b2ab tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 535.789s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.543173] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 143.348s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.543173] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.543424] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock 
"f981fe25-52bd-46e7-920e-1f73ca37d9a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.543506] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.546246] env[69027]: INFO nova.compute.manager [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Terminating instance [ 1764.548611] env[69027]: DEBUG nova.compute.manager [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1764.549225] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1764.549225] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-079daea8-7d3d-48d8-a5c2-1f2c95dd39bd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.559731] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2402b780-b5ed-4826-9172-76984a3b48ec {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.588591] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f981fe25-52bd-46e7-920e-1f73ca37d9a3 could not be found. [ 1764.588823] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1764.589022] env[69027]: INFO nova.compute.manager [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1764.589276] env[69027]: DEBUG oslo.service.loopingcall [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1764.589522] env[69027]: DEBUG nova.compute.manager [-] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1764.589619] env[69027]: DEBUG nova.network.neutron [-] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1764.776622] env[69027]: DEBUG nova.network.neutron [-] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.785266] env[69027]: INFO nova.compute.manager [-] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] Took 0.20 seconds to deallocate network for instance. [ 1764.884617] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bd35d709-3523-4d99-969a-2f642848d065 tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.342s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.885706] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 19.419s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.885901] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f981fe25-52bd-46e7-920e-1f73ca37d9a3] During sync_power_state the instance has a pending task (deleting). Skip. 
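The "Waiting for function ... _deallocate_network_with_retries to return" line is oslo.service's looping-call helper: the wrapped function is invoked on a fixed interval until it raises LoopingCallDone or gives up. A generic sketch of that mechanism, with a placeholder retry body rather than nova's network code:

# Sketch only: the FixedIntervalLoopingCall retry pattern.
from oslo_service import loopingcall

MAX_ATTEMPTS = 3
state = {'attempts': 0}

def _deallocate_with_retries():
    state['attempts'] += 1
    try:
        pass  # the real code calls neutron deallocate_for_instance() here
    except Exception:
        if state['attempts'] >= MAX_ATTEMPTS:
            raise      # stops the loop and propagates the failure
        return         # run again on the next interval
    raise loopingcall.LoopingCallDone(retvalue=True)  # success: stop the loop

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
timer.start(interval=1).wait()  # wait() returns the LoopingCallDone retvalue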
[ 1764.886088] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "f981fe25-52bd-46e7-920e-1f73ca37d9a3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.784605] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1765.784909] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1765.796484] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] There are 0 instances to clean {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1767.311483] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "e379ca79-9458-464d-b07f-f651e474ebd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.311807] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "e379ca79-9458-464d-b07f-f651e474ebd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.321455] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1767.370281] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.371956] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.372110] env[69027]: INFO nova.compute.claims [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1767.526477] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b03b8ab-7775-4e45-bced-8fa78aa78d45 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.533781] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd5f3ef-498a-4e7a-af8f-db9b73baf9a7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.564301] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af4813e2-15bf-463e-94a7-7d83768a8373 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.570915] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54bdc89f-e36d-406c-804b-5b42815af391 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.583837] env[69027]: DEBUG nova.compute.provider_tree [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1767.592329] env[69027]: DEBUG nova.scheduler.client.report [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1767.608047] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.238s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.608436] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1767.638661] env[69027]: DEBUG nova.compute.utils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1767.640468] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Not allocating networking since 'none' was specified. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1767.649079] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1767.709545] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1767.735052] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1767.735317] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1767.735476] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1767.735661] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1767.735809] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1767.735958] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1767.736185] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1767.736351] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1767.736517] env[69027]: DEBUG nova.virt.hardware [None 
req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1767.736680] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1767.736899] env[69027]: DEBUG nova.virt.hardware [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1767.737775] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cefb5a9-9c2c-40bd-ad82-474f7df0393d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.745780] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee3f4f9-0332-4e18-b550-0f1983786dab {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.758601] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance VIF info [] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1767.764146] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Creating folder: Project (1c4bf2f527aa43999e7a1332e06c5a19). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1767.764397] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a4e2e01-235a-4c3c-bfdd-8bce33044afe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.773459] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Created folder: Project (1c4bf2f527aa43999e7a1332e06c5a19) in parent group-v677321. [ 1767.773606] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Creating folder: Instances. Parent ref: group-v677417. 
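
The nova.virt.hardware entries above walk from flavor/image limits and preferences to "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies" and a single sorted candidate of cores=1,sockets=1,threads=1. A simplified sketch of that kind of enumeration (not the exact nova.virt.hardware algorithm; small bounds are used here so the brute force stays cheap):

    from itertools import product

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        """Enumerate (sockets, cores, threads) triples that exactly cover vcpus."""
        found = []
        for sockets, cores, threads in product(range(1, max_sockets + 1),
                                               range(1, max_cores + 1),
                                               range(1, max_threads + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    # For a 1-vCPU flavor like m1.nano, with effectively unconstrained limits,
    # the only candidate is (1, 1, 1) -- matching "Got 1 possible topologies".
    print(possible_topologies(1, 8, 8, 2))   # -> [(1, 1, 1)]
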
{{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1767.773818] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-226ea0ea-8138-4545-bae7-433caa4475c1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.781901] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Created folder: Instances in parent group-v677417. [ 1767.782133] env[69027]: DEBUG oslo.service.loopingcall [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1767.782311] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1767.782499] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14c76d4c-bc0e-484e-8f03-980070b5bc6f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.799341] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1767.799341] env[69027]: value = "task-3395248" [ 1767.799341] env[69027]: _type = "Task" [ 1767.799341] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.806063] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395248, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.309992] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395248, 'name': CreateVM_Task, 'duration_secs': 0.243796} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.310194] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1768.310629] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.310791] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1768.311157] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1768.311420] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65c84a4d-34a6-4767-b1f5-9af2f7a3753c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.315844] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Waiting for the task: (returnval){ [ 1768.315844] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52fe358c-9a97-781b-a542-df62882fa974" [ 1768.315844] env[69027]: _type = "Task" [ 1768.315844] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.324354] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52fe358c-9a97-781b-a542-df62882fa974, 'name': SearchDatastore_Task} progress is 0%. 
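
The CreateVM_Task sequence above shows the usual shape of vCenter task handling: submit the call, get back a task reference, then poll its progress until it reports success or an error (here the task completed in about 0.24s). A generic polling-loop sketch of that pattern; `get_task_info` and its `.state`/`.progress`/`.error` attributes are stand-ins, since the driver really goes through oslo.vmware's wait_for_task/_poll_task machinery:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
        """Poll a task until it succeeds, errors, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)          # hypothetical lookup callable
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(poll_interval)
        raise TimeoutError(f"task {task_id} did not complete in {timeout}s")
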
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.826712] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.826905] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1768.827128] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.858412] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.858735] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.783701] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1804.784050] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1804.784050] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1804.807243] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.807436] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.807545] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.807673] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.807797] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.807917] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.808169] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.808332] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.808458] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.808576] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1804.808695] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
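
The periodic _heal_instance_info_cache pass above skips every instance that is still Building and then reports that nothing was eligible for a cache refresh. A toy filter showing that selection logic (the dataclass and state names are made up for illustration, not Nova's objects):

    from dataclasses import dataclass

    @dataclass
    class Instance:
        uuid: str
        vm_state: str   # e.g. 'building', 'active'

    def instances_to_heal(instances):
        to_heal = []
        for inst in instances:
            if inst.vm_state == 'building':
                print(f"[instance: {inst.uuid}] Skipping network cache update "
                      "for instance because it is Building.")
                continue
            to_heal.append(inst)
        if not to_heal:
            print("Didn't find any instances for network info cache update.")
        return to_heal

    instances_to_heal([Instance("e379ca79-9458-464d-b07f-f651e474ebd7", "building")])
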
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1806.771732] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.772030] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.783216] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.783378] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.783533] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.783691] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1806.785121] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a18049a-bd6d-4559-9bf5-f51084e035a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.793897] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8440512-73e2-49b7-ba32-8a81162b3783 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.807446] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ab8eb1-2de9-40a2-bb00-13539c7fa08f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.813459] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2607b4-40c0-4e44-a5df-f908ee2fcd36 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.842913] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180985MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1806.843075] env[69027]: DEBUG 
oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.843254] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.910512] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.910668] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 03d9d361-da15-4fb7-acfb-049098183bc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.910791] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.910908] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.911041] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.911166] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.911281] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.911397] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.911512] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.911658] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1806.922051] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1806.922266] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1806.922412] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1806.939338] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Refreshing inventories for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1806.952359] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Updating ProviderTree inventory for provider 4923c91f-3b2b-4ad1-a821-36209acae639 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1806.952562] env[69027]: DEBUG nova.compute.provider_tree [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1806.963119] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Refreshing aggregate associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, aggregates: None {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1806.981133] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Refreshing trait associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1807.097968] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7570f81-f619-479f-92a6-73156e2cfb7d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.105317] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82d34f1-d57c-41c7-88f1-699e1d1352a2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.134045] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e2edf5-23af-4159-8bcc-191fd34311e8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.140707] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714ccd32-1991-47b5-99c2-e5ada11f9fdd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.154396] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.162728] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1807.177597] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1807.177774] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.335s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.177172] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.771015] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.771339] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1810.017470] env[69027]: WARNING oslo_vmware.rw_handles [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1810.017470] env[69027]: ERROR oslo_vmware.rw_handles [ 1810.018050] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1810.020312] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1810.020572] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Copying Virtual Disk [datastore2] vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/a9d6d0b7-cd62-4837-9de8-704304a4bf11/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1810.020861] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbbd7c13-23bb-4e6c-9d4b-6e9b5c9b3421 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.030627] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1810.030627] env[69027]: value = "task-3395249" [ 1810.030627] env[69027]: _type = "Task" [ 1810.030627] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.038348] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': task-3395249, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.540700] env[69027]: DEBUG oslo_vmware.exceptions [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Fault InvalidArgument not matched. 
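
"Fault InvalidArgument not matched" above means the fault name returned by vCenter did not map to a dedicated exception class, so the error falls back to a generic VimFaultException carrying the message and the fault list. A minimal sketch of that lookup-with-fallback idea (class names and the registry are illustrative, not oslo.vmware's actual exception hierarchy):

    class VimFault(Exception):
        def __init__(self, message, fault_list):
            super().__init__(message)
            self.fault_list = fault_list

    class FileNotFoundFault(VimFault):
        pass

    # Only a few well-known fault names get dedicated classes; anything else,
    # such as 'InvalidArgument' here, is "not matched" and uses the base class.
    _FAULT_CLASSES = {"FileNotFound": FileNotFoundFault}

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            print(f"Fault {fault_name} not matched.")
            cls = VimFault
        return cls(message, [fault_name])

    exc = translate_fault("InvalidArgument",
                          "A specified parameter was not correct: fileType")
    print(type(exc).__name__, exc.fault_list)
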
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1810.540997] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.541593] env[69027]: ERROR nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1810.541593] env[69027]: Faults: ['InvalidArgument'] [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Traceback (most recent call last): [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] yield resources [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self.driver.spawn(context, instance, image_meta, [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self._fetch_image_if_missing(context, vi) [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] image_cache(vi, tmp_image_ds_loc) [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] vm_util.copy_virtual_disk( [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] session._wait_for_task(vmdk_copy_task) [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] return self.wait_for_task(task_ref) [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] return evt.wait() [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] result = hub.switch() [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] return self.greenlet.switch() [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self.f(*self.args, **self.kw) [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] raise exceptions.translate_fault(task_info.error) [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Faults: ['InvalidArgument'] [ 1810.541593] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] [ 1810.542843] env[69027]: INFO nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Terminating instance [ 1810.543483] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.543692] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1810.543921] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2cc644c-67cb-4860-84b7-909edd9fec7c {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.545996] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1810.546205] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1810.546888] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c59efa6-de8d-4dd4-902a-3aa86014bcdb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.553350] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1810.553556] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8548ccd0-fd9c-4632-868d-d6f3407544c9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.555523] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1810.555700] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1810.556620] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66ba1eb0-bbe3-452e-945b-9d647cff8a11 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.561468] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 1810.561468] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]527fc4e1-d483-cac8-8901-6d6a506f07a6" [ 1810.561468] env[69027]: _type = "Task" [ 1810.561468] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.574989] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]527fc4e1-d483-cac8-8901-6d6a506f07a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.629325] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1810.629567] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1810.629751] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Deleting the datastore file [datastore2] 39ee164e-5c7c-44cf-9767-cef1b8560bfb {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1810.630044] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac97c5c4-c83f-4689-830d-16f8cfe40638 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.636704] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for the task: (returnval){ [ 1810.636704] env[69027]: value = "task-3395251" [ 1810.636704] env[69027]: _type = "Task" [ 1810.636704] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.643986] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': task-3395251, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.071198] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1811.071539] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating directory with path [datastore2] vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1811.071684] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0c2df31-7233-4137-a296-2ddf88c46584 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.082402] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Created directory with path [datastore2] vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1811.082584] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Fetch image to [datastore2] vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1811.082741] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1811.083446] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb2ff53-337b-4ca4-9364-2f751a87b91c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.089557] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214a174d-44e9-4f83-92ef-7532a1ad6933 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.098253] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d3169c-03ff-455c-8b23-18cd381aaee6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.128826] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14efc91-80f6-4db5-9c60-993013ea6a09 {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.134306] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5dccde84-f68e-44d8-a9a5-b39c0e25bd24 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.144034] env[69027]: DEBUG oslo_vmware.api [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Task: {'id': task-3395251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068773} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.144270] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1811.144455] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1811.144627] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1811.144799] env[69027]: INFO nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Took 0.60 seconds to destroy the instance on the hypervisor. 
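
The failed spawn above is followed by a fixed cleanup sequence: terminate the instance (unregister the VM, delete its datastore directory), drop the resource claim under "compute_resources", and hand the build back for rescheduling. A compact sketch of that failure path, with every callable a stand-in rather than Nova's real methods:

    def build_instance(spawn, destroy, abort_claim, reschedule):
        """Failure-path sketch matching the sequence in the log."""
        try:
            spawn()                 # here: raises the CopyVirtualDisk_Task fault
        except Exception as exc:
            print(f"Instance failed to spawn: {exc}")
            destroy()               # unregister the VM, delete datastore files
            abort_claim()           # release VCPU/MEMORY_MB/DISK_GB held by the claim
            reschedule(exc)         # let the scheduler retry the build elsewhere
            return
        print("Instance spawned successfully.")
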
[ 1811.149268] env[69027]: DEBUG nova.compute.claims [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1811.149456] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.149673] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.155677] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1811.216812] env[69027]: DEBUG oslo_vmware.rw_handles [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1811.277727] env[69027]: DEBUG oslo_vmware.rw_handles [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1811.277913] env[69027]: DEBUG oslo_vmware.rw_handles [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
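
The rw_handles entries above describe streaming the 21318656-byte image straight to the ESX host's /folder endpoint, addressing the target file by datastore path plus the dcPath and dsName query parameters. A sketch of the request shape only, assuming the `requests` library; the real transfer goes through oslo_vmware.rw_handles with an acquired generic service ticket, so the auth headers here are placeholders:

    import requests

    def upload_to_datastore(local_path, esx_host, ds_path, datacenter, datastore,
                            session_headers, verify=True):
        """Illustrative HTTPS PUT of a local file to an ESX /folder URL."""
        url = f"https://{esx_host}:443/folder/{ds_path}"
        params = {"dcPath": datacenter, "dsName": datastore}
        with open(local_path, "rb") as f:
            resp = requests.put(url, params=params, data=f,
                                headers=session_headers, verify=verify)
        resp.raise_for_status()
        return resp.status_code
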
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1811.372263] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b86c222-b1fb-4ae7-b1ca-87dc2b5178ae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.379631] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01eb5f7-a509-4f63-a929-361e5158ce91 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.408877] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e017748a-3cf7-49aa-9dc3-2c4e7c54038c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.415700] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d3421b-0a4e-4f4a-8640-858bea989964 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.428212] env[69027]: DEBUG nova.compute.provider_tree [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.436373] env[69027]: DEBUG nova.scheduler.client.report [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1811.453124] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.303s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.453692] env[69027]: ERROR nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1811.453692] env[69027]: Faults: ['InvalidArgument'] [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Traceback (most recent call last): [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self.driver.spawn(context, instance, image_meta, [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self._fetch_image_if_missing(context, vi) [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] image_cache(vi, tmp_image_ds_loc) [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] vm_util.copy_virtual_disk( [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] session._wait_for_task(vmdk_copy_task) [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] return self.wait_for_task(task_ref) [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] return evt.wait() [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] result = hub.switch() [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] return self.greenlet.switch() [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] self.f(*self.args, **self.kw) [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 
39ee164e-5c7c-44cf-9767-cef1b8560bfb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] raise exceptions.translate_fault(task_info.error) [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Faults: ['InvalidArgument'] [ 1811.453692] env[69027]: ERROR nova.compute.manager [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] [ 1811.454602] env[69027]: DEBUG nova.compute.utils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1811.455940] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Build of instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb was re-scheduled: A specified parameter was not correct: fileType [ 1811.455940] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1811.456321] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1811.456498] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1811.456667] env[69027]: DEBUG nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1811.456834] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1811.753358] env[69027]: DEBUG nova.network.neutron [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.764029] env[69027]: INFO nova.compute.manager [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Took 0.31 seconds to deallocate network for instance. [ 1811.770508] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.770767] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1811.849740] env[69027]: INFO nova.scheduler.client.report [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Deleted allocations for instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb [ 1811.875441] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c9ffc4bf-c15d-4fd0-8daa-0b788a9d0a5e tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 582.667s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.876546] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.164s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.876770] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f 
tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Acquiring lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.876971] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.877201] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.879649] env[69027]: INFO nova.compute.manager [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Terminating instance [ 1811.881178] env[69027]: DEBUG nova.compute.manager [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1811.881553] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1811.881998] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-acb33d96-1525-4ea6-b8e3-f3550f30c346 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.887996] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1811.894677] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd36cd5-c55f-4abe-a9f7-8313cdc8bc9f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.923811] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 39ee164e-5c7c-44cf-9767-cef1b8560bfb could not be found. [ 1811.923978] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1811.924177] env[69027]: INFO nova.compute.manager [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1811.924491] env[69027]: DEBUG oslo.service.loopingcall [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1811.924644] env[69027]: DEBUG nova.compute.manager [-] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1811.924737] env[69027]: DEBUG nova.network.neutron [-] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1811.943655] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.943898] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.945359] env[69027]: INFO nova.compute.claims [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1811.948736] env[69027]: DEBUG nova.network.neutron [-] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1811.956621] env[69027]: INFO nova.compute.manager [-] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] Took 0.03 seconds to deallocate network for instance. [ 1812.073760] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b746345d-ab12-47d7-bdcc-1763dca96b0f tempest-ServerRescueNegativeTestJSON-1743272271 tempest-ServerRescueNegativeTestJSON-1743272271-project-member] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.074994] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 66.608s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.075201] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 39ee164e-5c7c-44cf-9767-cef1b8560bfb] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1812.075404] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "39ee164e-5c7c-44cf-9767-cef1b8560bfb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.123187] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70786fb9-70bc-4cbf-ad33-98fde711e3dc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.131081] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7520c6f8-4c3c-43be-8786-4779a7867d30 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.161239] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8737cee6-6578-472f-90e2-6e73c21b147e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.168150] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7abe557-2336-4296-8ac0-4e703d14a896 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.180747] env[69027]: DEBUG nova.compute.provider_tree [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1812.188703] env[69027]: DEBUG nova.scheduler.client.report [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1812.202435] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.258s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.202905] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1812.242144] env[69027]: DEBUG nova.compute.utils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1812.243920] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1812.244131] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1812.252241] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1812.312666] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1812.338029] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1812.338348] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1812.338661] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1812.338927] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1812.339107] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1812.339279] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1812.339572] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1812.339733] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 
tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1812.339931] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1812.340130] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1812.340317] env[69027]: DEBUG nova.virt.hardware [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1812.341219] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d885661b-c3aa-489e-a695-f91da483a968 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.345772] env[69027]: DEBUG nova.policy [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3989b31368e34b108f730d2edfd3d1c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c192e1bf5f84815802f1a938f4b2685', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1812.352476] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe047eb7-a978-48f9-a1d2-214d916339aa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.660139] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Successfully created port: 091bf948-1c5d-4c83-b431-55dfb32ce2f6 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1812.770906] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.771104] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1813.333856] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Successfully updated port: 091bf948-1c5d-4c83-b431-55dfb32ce2f6 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1813.346456] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "refresh_cache-9f12e2a1-852b-4d55-8e38-ddeb9adb3053" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.346768] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquired lock "refresh_cache-9f12e2a1-852b-4d55-8e38-ddeb9adb3053" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.347034] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1813.387800] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1813.593635] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Updating instance_info_cache with network_info: [{"id": "091bf948-1c5d-4c83-b431-55dfb32ce2f6", "address": "fa:16:3e:eb:64:b5", "network": {"id": "cb75dda9-ee29-4fea-954e-488fadad683e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2047818938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c192e1bf5f84815802f1a938f4b2685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap091bf948-1c", "ovs_interfaceid": "091bf948-1c5d-4c83-b431-55dfb32ce2f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.605860] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Releasing lock "refresh_cache-9f12e2a1-852b-4d55-8e38-ddeb9adb3053" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.606146] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Instance network_info: |[{"id": "091bf948-1c5d-4c83-b431-55dfb32ce2f6", "address": "fa:16:3e:eb:64:b5", "network": {"id": "cb75dda9-ee29-4fea-954e-488fadad683e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2047818938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c192e1bf5f84815802f1a938f4b2685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap091bf948-1c", "ovs_interfaceid": "091bf948-1c5d-4c83-b431-55dfb32ce2f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1813.606541] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:64:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7b7edd0-124a-48ec-ae26-1aa14f9b884a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '091bf948-1c5d-4c83-b431-55dfb32ce2f6', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1813.614155] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Creating folder: Project (5c192e1bf5f84815802f1a938f4b2685). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1813.614649] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0e484b8-cca6-4cea-8740-9e58154ea87c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.626754] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Created folder: Project (5c192e1bf5f84815802f1a938f4b2685) in parent group-v677321. [ 1813.626925] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Creating folder: Instances. Parent ref: group-v677420. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1813.627149] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51ca4f78-f496-442a-abbd-dc34067e88cb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.634703] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Created folder: Instances in parent group-v677420. [ 1813.634918] env[69027]: DEBUG oslo.service.loopingcall [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1813.635110] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1813.635294] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0d61db4-8c22-475a-82a4-44464a9da2e3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.653019] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1813.653019] env[69027]: value = "task-3395254" [ 1813.653019] env[69027]: _type = "Task" [ 1813.653019] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.660392] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395254, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.794132] env[69027]: DEBUG nova.compute.manager [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Received event network-vif-plugged-091bf948-1c5d-4c83-b431-55dfb32ce2f6 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1813.794419] env[69027]: DEBUG oslo_concurrency.lockutils [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] Acquiring lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.794712] env[69027]: DEBUG oslo_concurrency.lockutils [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.794966] env[69027]: DEBUG oslo_concurrency.lockutils [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.795204] env[69027]: DEBUG nova.compute.manager [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] No waiting events found dispatching network-vif-plugged-091bf948-1c5d-4c83-b431-55dfb32ce2f6 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1813.795439] env[69027]: WARNING nova.compute.manager [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Received unexpected event network-vif-plugged-091bf948-1c5d-4c83-b431-55dfb32ce2f6 for instance with vm_state building and task_state spawning. 
[ 1813.795677] env[69027]: DEBUG nova.compute.manager [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Received event network-changed-091bf948-1c5d-4c83-b431-55dfb32ce2f6 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1813.796082] env[69027]: DEBUG nova.compute.manager [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Refreshing instance network info cache due to event network-changed-091bf948-1c5d-4c83-b431-55dfb32ce2f6. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1813.796168] env[69027]: DEBUG oslo_concurrency.lockutils [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] Acquiring lock "refresh_cache-9f12e2a1-852b-4d55-8e38-ddeb9adb3053" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.796363] env[69027]: DEBUG oslo_concurrency.lockutils [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] Acquired lock "refresh_cache-9f12e2a1-852b-4d55-8e38-ddeb9adb3053" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.796587] env[69027]: DEBUG nova.network.neutron [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Refreshing network info cache for port 091bf948-1c5d-4c83-b431-55dfb32ce2f6 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1814.085355] env[69027]: DEBUG nova.network.neutron [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Updated VIF entry in instance network info cache for port 091bf948-1c5d-4c83-b431-55dfb32ce2f6. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1814.085786] env[69027]: DEBUG nova.network.neutron [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Updating instance_info_cache with network_info: [{"id": "091bf948-1c5d-4c83-b431-55dfb32ce2f6", "address": "fa:16:3e:eb:64:b5", "network": {"id": "cb75dda9-ee29-4fea-954e-488fadad683e", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-2047818938-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c192e1bf5f84815802f1a938f4b2685", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7b7edd0-124a-48ec-ae26-1aa14f9b884a", "external-id": "nsx-vlan-transportzone-861", "segmentation_id": 861, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap091bf948-1c", "ovs_interfaceid": "091bf948-1c5d-4c83-b431-55dfb32ce2f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.096050] env[69027]: DEBUG oslo_concurrency.lockutils [req-55a95b60-13da-41c5-8998-78e6ecd8fe84 req-da14fbc7-6008-45e1-bc13-e561357f9236 service nova] Releasing lock "refresh_cache-9f12e2a1-852b-4d55-8e38-ddeb9adb3053" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.163833] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395254, 'name': CreateVM_Task, 'duration_secs': 0.273254} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.164083] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1814.164835] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.164999] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.165361] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1814.165617] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5db49566-93a4-4303-bbe7-5d4d56feea98 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.170426] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Waiting for the task: (returnval){ [ 1814.170426] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52b03bb6-0497-51d6-09bb-0f746ea48b63" [ 1814.170426] env[69027]: _type = "Task" [ 1814.170426] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.177927] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52b03bb6-0497-51d6-09bb-0f746ea48b63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.681203] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.681479] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1814.681695] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.400057] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "01d7b088-73b4-4624-b013-2da51bf78767" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.399672] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.400017] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.253903] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.254231] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.660273] env[69027]: WARNING oslo_vmware.rw_handles [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1859.660273] env[69027]: ERROR oslo_vmware.rw_handles [ 1859.661050] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1859.663067] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1859.663351] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Copying Virtual Disk [datastore2] vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/c270d616-baef-4cf8-a75c-4d762f5579e6/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1859.663662] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1161b58-1b4e-43d2-a9ad-0034b2209560 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.671426] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 
tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 1859.671426] env[69027]: value = "task-3395255" [ 1859.671426] env[69027]: _type = "Task" [ 1859.671426] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.678988] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': task-3395255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.181535] env[69027]: DEBUG oslo_vmware.exceptions [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1860.181877] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.182505] env[69027]: ERROR nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1860.182505] env[69027]: Faults: ['InvalidArgument'] [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Traceback (most recent call last): [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] yield resources [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self.driver.spawn(context, instance, image_meta, [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self._fetch_image_if_missing(context, vi) [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 
03d9d361-da15-4fb7-acfb-049098183bc3] image_cache(vi, tmp_image_ds_loc) [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] vm_util.copy_virtual_disk( [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] session._wait_for_task(vmdk_copy_task) [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] return self.wait_for_task(task_ref) [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] return evt.wait() [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] result = hub.switch() [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] return self.greenlet.switch() [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self.f(*self.args, **self.kw) [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] raise exceptions.translate_fault(task_info.error) [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Faults: ['InvalidArgument'] [ 1860.182505] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] [ 1860.183721] env[69027]: INFO nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Terminating instance [ 1860.184525] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f 
tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.184735] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1860.184973] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-926a627d-258f-4c5f-97a9-76db7ecf557d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.187365] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1860.187562] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1860.188301] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dddca46-bf9f-4813-b259-6c28c09c6bed {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.195341] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1860.195601] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3c43d6b-2ff4-40d9-80b9-1b3194112610 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.197688] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1860.197873] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1860.198869] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3502e2da-d4c9-44ba-a03c-9455e14b8803 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.203568] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1860.203568] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52ee321c-2bf3-a4a0-70a0-c52acdf92d7e" [ 1860.203568] env[69027]: _type = "Task" [ 1860.203568] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.210413] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52ee321c-2bf3-a4a0-70a0-c52acdf92d7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.261617] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1860.261891] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1860.262165] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Deleting the datastore file [datastore2] 03d9d361-da15-4fb7-acfb-049098183bc3 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1860.262464] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74705fdb-8d79-462f-a5be-801d5e94d2fe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.269363] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 1860.269363] env[69027]: value = "task-3395257" [ 1860.269363] env[69027]: _type = "Task" [ 1860.269363] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.276797] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': task-3395257, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.713813] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1860.714133] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating directory with path [datastore2] vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1860.714330] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-220b4458-be80-4efc-9e51-86fc83a2770d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.728801] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Created directory with path [datastore2] vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1860.728983] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Fetch image to [datastore2] vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1860.729169] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1860.729897] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea6cdba-8b68-41b3-8759-fd411542c08b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.736216] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19eff1d-1e9c-4da8-9424-945d44a6b2fb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.744785] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a24e70-de5b-4856-a5d8-9491d21322ed {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.777634] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-15dce1aa-651f-4cea-9153-9403767ab7c0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.784292] env[69027]: DEBUG oslo_vmware.api [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': task-3395257, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074619} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.785654] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1860.785844] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1860.786028] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1860.786205] env[69027]: INFO nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1860.787930] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-037359fa-1209-43ff-b1b8-ead4d910c762 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.789757] env[69027]: DEBUG nova.compute.claims [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1860.789935] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.790165] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.815156] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1860.988107] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684642ba-4f07-48bf-99e7-ed63addc86ef {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.995451] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f9ba9b-e49f-40c8-b011-f5d94c1ad54f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.999258] env[69027]: DEBUG oslo_vmware.rw_handles [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1861.080778] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1000c803-e752-4c3d-9985-d3e9e89f8546 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.083441] env[69027]: DEBUG oslo_vmware.rw_handles [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Completed reading data from the image iterator. 
{{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1861.083608] env[69027]: DEBUG oslo_vmware.rw_handles [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1861.088304] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29b20b4-2696-412e-b223-c7e6da7a2efa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.101300] env[69027]: DEBUG nova.compute.provider_tree [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.109540] env[69027]: DEBUG nova.scheduler.client.report [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1861.122710] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.332s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.123255] env[69027]: ERROR nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1861.123255] env[69027]: Faults: ['InvalidArgument'] [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Traceback (most recent call last): [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self.driver.spawn(context, instance, image_meta, [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1861.123255] env[69027]: ERROR nova.compute.manager 
[instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self._fetch_image_if_missing(context, vi) [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] image_cache(vi, tmp_image_ds_loc) [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] vm_util.copy_virtual_disk( [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] session._wait_for_task(vmdk_copy_task) [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] return self.wait_for_task(task_ref) [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] return evt.wait() [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] result = hub.switch() [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] return self.greenlet.switch() [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] self.f(*self.args, **self.kw) [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] raise exceptions.translate_fault(task_info.error) [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Faults: ['InvalidArgument'] [ 1861.123255] env[69027]: ERROR nova.compute.manager [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] [ 1861.124183] env[69027]: DEBUG nova.compute.utils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1861.125255] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Build of instance 03d9d361-da15-4fb7-acfb-049098183bc3 was re-scheduled: A specified parameter was not correct: fileType [ 1861.125255] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1861.125619] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1861.125833] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1861.125957] env[69027]: DEBUG nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1861.126135] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1861.447322] env[69027]: DEBUG nova.network.neutron [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.457851] env[69027]: INFO nova.compute.manager [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Took 0.33 seconds to deallocate network for instance. 
[ 1861.556057] env[69027]: INFO nova.scheduler.client.report [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Deleted allocations for instance 03d9d361-da15-4fb7-acfb-049098183bc3 [ 1861.578439] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95b843b8-092f-48f4-8748-207386d06f04 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "03d9d361-da15-4fb7-acfb-049098183bc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 625.127s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.579492] env[69027]: DEBUG oslo_concurrency.lockutils [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "03d9d361-da15-4fb7-acfb-049098183bc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 428.437s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.579766] env[69027]: DEBUG oslo_concurrency.lockutils [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "03d9d361-da15-4fb7-acfb-049098183bc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.579981] env[69027]: DEBUG oslo_concurrency.lockutils [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "03d9d361-da15-4fb7-acfb-049098183bc3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.580169] env[69027]: DEBUG oslo_concurrency.lockutils [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "03d9d361-da15-4fb7-acfb-049098183bc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.583501] env[69027]: INFO nova.compute.manager [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Terminating instance [ 1861.585220] env[69027]: DEBUG nova.compute.manager [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1861.585409] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1861.585661] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecdd50da-3153-45e8-8601-a8ca8ef14112 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.594954] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8352f6-2c17-47b7-a38c-0c1efdd0e8e7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.605591] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1861.625739] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03d9d361-da15-4fb7-acfb-049098183bc3 could not be found. [ 1861.625943] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1861.626139] env[69027]: INFO nova.compute.manager [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1861.626398] env[69027]: DEBUG oslo.service.loopingcall [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.626611] env[69027]: DEBUG nova.compute.manager [-] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1861.626706] env[69027]: DEBUG nova.network.neutron [-] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1861.651406] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.651656] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.653118] env[69027]: INFO nova.compute.claims [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1861.656058] env[69027]: DEBUG nova.network.neutron [-] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.663748] env[69027]: INFO nova.compute.manager [-] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] Took 0.04 seconds to deallocate network for instance. [ 1861.754605] env[69027]: DEBUG oslo_concurrency.lockutils [None req-42732950-fc09-4286-bd64-d50c700e2524 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "03d9d361-da15-4fb7-acfb-049098183bc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.175s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.755582] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "03d9d361-da15-4fb7-acfb-049098183bc3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 116.289s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.755769] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 03d9d361-da15-4fb7-acfb-049098183bc3] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1861.755968] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "03d9d361-da15-4fb7-acfb-049098183bc3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.829917] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e781f18-48a4-40a1-9335-9b2d0b1563a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.837605] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d32dd7-cb31-4871-ae37-a39f78d6ce34 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.866418] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1485f2e3-9e8e-4aee-96e5-3f78c301d5ca {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.872952] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c49b4c-f17b-4ede-a00f-37421a5dbcb0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.885350] env[69027]: DEBUG nova.compute.provider_tree [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.895938] env[69027]: DEBUG nova.scheduler.client.report [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1861.908884] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.257s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.909357] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Start building networks asynchronously for instance. 
{{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1861.941885] env[69027]: DEBUG nova.compute.utils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1861.942490] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1861.942696] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1861.951952] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1862.015466] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1862.034182] env[69027]: DEBUG nova.policy [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58e66320388b4e8294205232eec8cfaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947495558dff46eb9951fadfc3d12d32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1862.039832] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1862.040146] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1862.040320] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1862.040506] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1862.040683] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1862.040856] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1862.041079] env[69027]: DEBUG nova.virt.hardware [None 
req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1862.041245] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1862.041414] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1862.041577] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1862.041751] env[69027]: DEBUG nova.virt.hardware [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1862.042644] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2035d097-db5b-4b99-bd26-9a1c58910ada {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.050415] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7570a1ea-c808-480f-8758-3d7509f0e976 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.384065] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Successfully created port: 79d97c6a-c6c6-4ee8-b919-e9085e94bfbc {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1863.014400] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Successfully updated port: 79d97c6a-c6c6-4ee8-b919-e9085e94bfbc {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1863.025552] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "refresh_cache-f069ae93-e79f-4c89-99b8-f3ee70895ee6" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.025552] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 
tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "refresh_cache-f069ae93-e79f-4c89-99b8-f3ee70895ee6" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.025552] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1863.264018] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1863.456963] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Updating instance_info_cache with network_info: [{"id": "79d97c6a-c6c6-4ee8-b919-e9085e94bfbc", "address": "fa:16:3e:3f:49:f1", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79d97c6a-c6", "ovs_interfaceid": "79d97c6a-c6c6-4ee8-b919-e9085e94bfbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.467980] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "refresh_cache-f069ae93-e79f-4c89-99b8-f3ee70895ee6" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.468317] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Instance network_info: |[{"id": "79d97c6a-c6c6-4ee8-b919-e9085e94bfbc", "address": "fa:16:3e:3f:49:f1", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79d97c6a-c6", "ovs_interfaceid": "79d97c6a-c6c6-4ee8-b919-e9085e94bfbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1863.468744] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:49:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79d97c6a-c6c6-4ee8-b919-e9085e94bfbc', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1863.477189] env[69027]: DEBUG oslo.service.loopingcall [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1863.477681] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1863.477927] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54e4942d-51d0-43ee-8eaa-4deb7434a18e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.494409] env[69027]: DEBUG nova.compute.manager [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Received event network-vif-plugged-79d97c6a-c6c6-4ee8-b919-e9085e94bfbc {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1863.494611] env[69027]: DEBUG oslo_concurrency.lockutils [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] Acquiring lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.494809] env[69027]: DEBUG oslo_concurrency.lockutils [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.494976] env[69027]: DEBUG oslo_concurrency.lockutils [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.495155] env[69027]: DEBUG nova.compute.manager [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] No waiting events found dispatching network-vif-plugged-79d97c6a-c6c6-4ee8-b919-e9085e94bfbc {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1863.495316] env[69027]: WARNING nova.compute.manager [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Received unexpected event network-vif-plugged-79d97c6a-c6c6-4ee8-b919-e9085e94bfbc for instance with vm_state building and task_state spawning. [ 1863.495473] env[69027]: DEBUG nova.compute.manager [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Received event network-changed-79d97c6a-c6c6-4ee8-b919-e9085e94bfbc {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1863.495624] env[69027]: DEBUG nova.compute.manager [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Refreshing instance network info cache due to event network-changed-79d97c6a-c6c6-4ee8-b919-e9085e94bfbc. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1863.495801] env[69027]: DEBUG oslo_concurrency.lockutils [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] Acquiring lock "refresh_cache-f069ae93-e79f-4c89-99b8-f3ee70895ee6" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.495936] env[69027]: DEBUG oslo_concurrency.lockutils [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] Acquired lock "refresh_cache-f069ae93-e79f-4c89-99b8-f3ee70895ee6" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.496104] env[69027]: DEBUG nova.network.neutron [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Refreshing network info cache for port 79d97c6a-c6c6-4ee8-b919-e9085e94bfbc {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1863.503042] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1863.503042] env[69027]: value = "task-3395258" [ 1863.503042] env[69027]: _type = "Task" [ 1863.503042] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.512269] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395258, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.798840] env[69027]: DEBUG nova.network.neutron [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Updated VIF entry in instance network info cache for port 79d97c6a-c6c6-4ee8-b919-e9085e94bfbc. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1863.799220] env[69027]: DEBUG nova.network.neutron [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Updating instance_info_cache with network_info: [{"id": "79d97c6a-c6c6-4ee8-b919-e9085e94bfbc", "address": "fa:16:3e:3f:49:f1", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79d97c6a-c6", "ovs_interfaceid": "79d97c6a-c6c6-4ee8-b919-e9085e94bfbc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.808350] env[69027]: DEBUG oslo_concurrency.lockutils [req-50a92ed3-8539-461a-94d3-23a109ec6a0f req-f8fcc32c-f398-473f-bf6a-f9de76fe139a service nova] Releasing lock "refresh_cache-f069ae93-e79f-4c89-99b8-f3ee70895ee6" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.012371] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395258, 'name': CreateVM_Task, 'duration_secs': 0.264327} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.012544] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1864.019752] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.019997] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.020239] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1864.020475] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f51609-ad7f-4953-9751-e86b19d09078 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.024742] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 1864.024742] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]526173da-4c36-78a4-9b0c-1f7e1f7a96eb" [ 1864.024742] env[69027]: _type = "Task" [ 1864.024742] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.032050] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]526173da-4c36-78a4-9b0c-1f7e1f7a96eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.535372] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.535637] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1864.535849] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.771657] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1864.771827] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1864.771942] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1864.793607] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.793774] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.793898] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794043] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794180] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794302] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794421] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794539] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794656] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794774] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1864.794893] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1867.771321] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1867.782509] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.782720] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.782887] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.783053] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1867.784144] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1305b3c-3314-4757-a21d-c854eaa85226 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.794244] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30feeef-9335-4b6c-87fe-b7599ea00e80 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.807653] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97a923d-899f-44f1-9a9c-051d16e08d14 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.813840] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e10f6b-4586-401d-8a8e-27cdbb8f0d0e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.842172] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180997MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1867.842327] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1867.842499] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.912649] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance b930e792-b0a8-45e4-9330-befac22182b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.912833] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.912918] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.913082] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.913205] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.913329] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.913441] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.913562] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.913654] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.913775] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1867.925386] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1867.925619] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1867.925763] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1868.059346] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c673a377-a4cf-4113-9ac4-e13fa153e2ab {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.066656] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d11e67a-8d7e-4633-b4a5-25d9ea41574e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.097294] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ec2545-852f-4a31-a5b1-3a48de17a685 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.104298] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754c8b52-a160-4ff6-b99b-1184eaa68674 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.117064] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.125338] env[69027]: DEBUG nova.scheduler.client.report [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1868.140891] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1868.141109] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.299s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.137010] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1869.137308] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1869.766573] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1869.787322] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1870.771359] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1871.771693] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1871.771693] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.772054] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1872.772054] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1910.048579] env[69027]: WARNING oslo_vmware.rw_handles [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1910.048579] env[69027]: ERROR oslo_vmware.rw_handles [ 1910.049420] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1910.051422] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1910.051668] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Copying Virtual Disk [datastore2] vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/3d82e268-d692-445f-a8d5-ae7d1d3799b8/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1910.051967] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7665051c-b6e3-4eed-9be4-f85c153cd508 {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.059871] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1910.059871] env[69027]: value = "task-3395259" [ 1910.059871] env[69027]: _type = "Task" [ 1910.059871] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.068010] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': task-3395259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.570563] env[69027]: DEBUG oslo_vmware.exceptions [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1910.570834] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.571432] env[69027]: ERROR nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1910.571432] env[69027]: Faults: ['InvalidArgument'] [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] Traceback (most recent call last): [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] yield resources [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] self.driver.spawn(context, instance, image_meta, [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: 
b930e792-b0a8-45e4-9330-befac22182b7] self._fetch_image_if_missing(context, vi) [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] image_cache(vi, tmp_image_ds_loc) [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] vm_util.copy_virtual_disk( [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] session._wait_for_task(vmdk_copy_task) [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] return self.wait_for_task(task_ref) [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] return evt.wait() [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] result = hub.switch() [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] return self.greenlet.switch() [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] self.f(*self.args, **self.kw) [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] raise exceptions.translate_fault(task_info.error) [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] Faults: ['InvalidArgument'] [ 1910.571432] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] [ 1910.572641] env[69027]: INFO nova.compute.manager 
[None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Terminating instance [ 1910.573354] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.573563] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1910.573814] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7df29348-75cb-40a5-a9aa-14b78b0908c1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.575884] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1910.576096] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1910.576826] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20281830-b63e-440d-b16f-4491cdafddbb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.583338] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1910.583571] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8191823-271d-4e20-9945-790047051b93 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.585665] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1910.585838] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1910.586784] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0b6a653-675e-455b-864c-51c3256ce80f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.591303] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Waiting for the task: (returnval){ [ 1910.591303] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5290728e-3e7d-3984-c15b-0e67883451e5" [ 1910.591303] env[69027]: _type = "Task" [ 1910.591303] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.599308] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5290728e-3e7d-3984-c15b-0e67883451e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.659603] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1910.659799] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1910.659979] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Deleting the datastore file [datastore2] b930e792-b0a8-45e4-9330-befac22182b7 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1910.660343] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8a417b6-65f9-4089-8c90-0b235058eef1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.666728] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for the task: (returnval){ [ 1910.666728] env[69027]: value = "task-3395261" [ 1910.666728] env[69027]: _type = "Task" [ 1910.666728] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.674205] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': task-3395261, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.101393] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1911.101764] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Creating directory with path [datastore2] vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1911.101814] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca436f97-2730-404b-b5e4-b850bc524627 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.112520] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Created directory with path [datastore2] vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1911.112771] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Fetch image to [datastore2] vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1911.112908] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1911.113588] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1585538a-f88e-460d-81e9-b6ac6ff082d0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.119565] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304d8c5a-7ca2-4c33-a2a4-38b2c223d2e9 {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.128442] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf271dc-5222-4c20-9fe6-7ba5241cb2f2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.158385] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576186f3-8c8f-4cda-87bf-ca942fb1406e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.163503] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1a2cec83-b70b-4fb1-8673-09db587d7ce5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.174472] env[69027]: DEBUG oslo_vmware.api [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Task: {'id': task-3395261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068669} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.174720] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1911.174918] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1911.175161] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1911.175282] env[69027]: INFO nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Took 0.60 seconds to destroy the instance on the hypervisor. 
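The DeleteDatastoreFile_Task entries just above follow the same request / wait_for_task / poll-until-complete shape as every other vCenter task in this log (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task). Below is a minimal, self-contained sketch of that polling pattern; fetch_task_info is a hypothetical stand-in for the vSphere task query that oslo_vmware performs internally, so treat this as an illustration of the pattern visible in the log, not the library's actual implementation.

import time

def wait_for_task(fetch_task_info, task_ref, interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state."""
    while True:
        info = fetch_task_info(task_ref)           # hypothetical helper, assumed to return a dict
        state = info["state"]
        if state == "success":
            return info.get("result")              # completion carries e.g. duration_secs, as logged above
        if state == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # queued / running: report progress, then poll again
        print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)

For scale, the logged DeleteDatastoreFile_Task completed with duration_secs 0.068669, and the whole destroy sequence (the "Destroying instance" entry at 1910.576096 through the INFO summary at 1911.175282) spans about 0.60 s, matching the "Took 0.60 seconds to destroy the instance" line above.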
[ 1911.177364] env[69027]: DEBUG nova.compute.claims [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1911.177540] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.177758] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.190081] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1911.243996] env[69027]: DEBUG oslo_vmware.rw_handles [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1911.306442] env[69027]: DEBUG oslo_vmware.rw_handles [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1911.307078] env[69027]: DEBUG oslo_vmware.rw_handles [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1911.393730] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a821150e-5521-4eb1-b844-9975ea5830a9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.401384] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d62900-10bf-418e-9e80-5f1995f3683e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.432146] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b19e29d-d8d8-44e1-aafe-4fc2de2cf89e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.439189] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bb0e0e-6860-4830-8dd0-23821cecdf5b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.451927] env[69027]: DEBUG nova.compute.provider_tree [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1911.460389] env[69027]: DEBUG nova.scheduler.client.report [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1911.473935] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.474521] env[69027]: ERROR nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1911.474521] env[69027]: Faults: ['InvalidArgument'] [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] Traceback (most recent call last): [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 
1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] self.driver.spawn(context, instance, image_meta, [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] self._fetch_image_if_missing(context, vi) [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] image_cache(vi, tmp_image_ds_loc) [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] vm_util.copy_virtual_disk( [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] session._wait_for_task(vmdk_copy_task) [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] return self.wait_for_task(task_ref) [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] return evt.wait() [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] result = hub.switch() [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] return self.greenlet.switch() [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] self.f(*self.args, **self.kw) [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] raise exceptions.translate_fault(task_info.error) [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] Faults: ['InvalidArgument'] [ 1911.474521] env[69027]: ERROR nova.compute.manager [instance: b930e792-b0a8-45e4-9330-befac22182b7] [ 1911.475507] env[69027]: DEBUG nova.compute.utils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1911.476967] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Build of instance b930e792-b0a8-45e4-9330-befac22182b7 was re-scheduled: A specified parameter was not correct: fileType [ 1911.476967] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1911.477351] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1911.477528] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1911.477698] env[69027]: DEBUG nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1911.477864] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1911.771505] env[69027]: DEBUG nova.network.neutron [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.781894] env[69027]: INFO nova.compute.manager [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Took 0.30 seconds to deallocate network for instance. [ 1911.875956] env[69027]: INFO nova.scheduler.client.report [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Deleted allocations for instance b930e792-b0a8-45e4-9330-befac22182b7 [ 1911.900117] env[69027]: DEBUG oslo_concurrency.lockutils [None req-07f22d1d-d306-40b8-9351-6d6594908f7f tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "b930e792-b0a8-45e4-9330-befac22182b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.895s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.901303] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "b930e792-b0a8-45e4-9330-befac22182b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.973s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.901603] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Acquiring lock "b930e792-b0a8-45e4-9330-befac22182b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.901730] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "b930e792-b0a8-45e4-9330-befac22182b7-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.901895] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "b930e792-b0a8-45e4-9330-befac22182b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.903967] env[69027]: INFO nova.compute.manager [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Terminating instance [ 1911.906645] env[69027]: DEBUG nova.compute.manager [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1911.907046] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1911.907363] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cddd0345-97fa-4d0d-adcd-5f572d6b366a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.917519] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8aaabbe-36d6-4308-8e2b-18d1bd752047 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.929022] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1911.950359] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b930e792-b0a8-45e4-9330-befac22182b7 could not be found. 
[ 1911.950648] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1911.950864] env[69027]: INFO nova.compute.manager [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1911.951143] env[69027]: DEBUG oslo.service.loopingcall [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.951409] env[69027]: DEBUG nova.compute.manager [-] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1911.951511] env[69027]: DEBUG nova.network.neutron [-] [instance: b930e792-b0a8-45e4-9330-befac22182b7] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1911.975014] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.975269] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.976702] env[69027]: INFO nova.compute.claims [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1911.985767] env[69027]: DEBUG nova.network.neutron [-] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.995562] env[69027]: INFO nova.compute.manager [-] [instance: b930e792-b0a8-45e4-9330-befac22182b7] Took 0.04 seconds to deallocate network for instance. 
[ 1912.088841] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0ef1cf19-866a-4e89-bc39-8bdf11616264 tempest-AttachVolumeShelveTestJSON-474317084 tempest-AttachVolumeShelveTestJSON-474317084-project-member] Lock "b930e792-b0a8-45e4-9330-befac22182b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.090019] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "b930e792-b0a8-45e4-9330-befac22182b7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 166.622s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.090019] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: b930e792-b0a8-45e4-9330-befac22182b7] During sync_power_state the instance has a pending task (deleting). Skip. [ 1912.090201] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "b930e792-b0a8-45e4-9330-befac22182b7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.154481] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec0d0fc-0839-493d-ac17-450f41317245 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.162265] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8add0c70-3d23-4105-a6ec-c522550de679 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.191742] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9408b6-5edc-4674-9717-8c7255a37af2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.198651] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999f41ce-8637-4a05-b0d9-3bfe192de0f2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.212280] env[69027]: DEBUG nova.compute.provider_tree [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1912.222362] env[69027]: DEBUG nova.scheduler.client.report [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1912.237962] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.238503] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1912.273206] env[69027]: DEBUG nova.compute.utils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1912.274861] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1912.274861] env[69027]: DEBUG nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1912.284123] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1912.329942] env[69027]: DEBUG nova.policy [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4c7f278f8857481786002608a110697d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c558fa59cdb4454957a8e7792365a47', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1912.352131] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1912.378548] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1912.378801] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1912.378963] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1912.379177] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1912.379369] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1912.379527] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1912.379755] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1912.379990] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1912.380219] 
env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1912.380439] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1912.380624] env[69027]: DEBUG nova.virt.hardware [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1912.381526] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15551e4-70eb-4554-9ad8-eacdecfdb7f6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.389789] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de090780-04c7-4864-95ba-29f511290d3e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.702221] env[69027]: DEBUG nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Successfully created port: b14bbb55-b5ad-45da-9017-e779dccf7c5e {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1913.285458] env[69027]: DEBUG nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Successfully updated port: b14bbb55-b5ad-45da-9017-e779dccf7c5e {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1913.297864] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "refresh_cache-ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.298039] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired lock "refresh_cache-ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.298204] env[69027]: DEBUG nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1913.338068] env[69027]: DEBUG 
nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1913.686416] env[69027]: DEBUG nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Updating instance_info_cache with network_info: [{"id": "b14bbb55-b5ad-45da-9017-e779dccf7c5e", "address": "fa:16:3e:ff:fc:35", "network": {"id": "ef5d0002-4d0c-452c-8eaf-fcbb3db325fc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-613479526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c558fa59cdb4454957a8e7792365a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb14bbb55-b5", "ovs_interfaceid": "b14bbb55-b5ad-45da-9017-e779dccf7c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.700996] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Releasing lock "refresh_cache-ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.701433] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Instance network_info: |[{"id": "b14bbb55-b5ad-45da-9017-e779dccf7c5e", "address": "fa:16:3e:ff:fc:35", "network": {"id": "ef5d0002-4d0c-452c-8eaf-fcbb3db325fc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-613479526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c558fa59cdb4454957a8e7792365a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb14bbb55-b5", 
"ovs_interfaceid": "b14bbb55-b5ad-45da-9017-e779dccf7c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1913.701750] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:fc:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0df968ae-c1ef-4009-a0f4-6f2e799c2fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b14bbb55-b5ad-45da-9017-e779dccf7c5e', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1913.709137] env[69027]: DEBUG oslo.service.loopingcall [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.709791] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1913.709791] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03deb435-3224-447d-b72f-db0a2ecf08f8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.730625] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1913.730625] env[69027]: value = "task-3395262" [ 1913.730625] env[69027]: _type = "Task" [ 1913.730625] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.739151] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395262, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.819359] env[69027]: DEBUG nova.compute.manager [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Received event network-vif-plugged-b14bbb55-b5ad-45da-9017-e779dccf7c5e {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1913.819618] env[69027]: DEBUG oslo_concurrency.lockutils [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] Acquiring lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.819837] env[69027]: DEBUG oslo_concurrency.lockutils [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.820018] env[69027]: DEBUG oslo_concurrency.lockutils [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.820221] env[69027]: DEBUG nova.compute.manager [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] No waiting events found dispatching network-vif-plugged-b14bbb55-b5ad-45da-9017-e779dccf7c5e {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1913.820395] env[69027]: WARNING nova.compute.manager [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Received unexpected event network-vif-plugged-b14bbb55-b5ad-45da-9017-e779dccf7c5e for instance with vm_state building and task_state spawning. [ 1913.820560] env[69027]: DEBUG nova.compute.manager [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Received event network-changed-b14bbb55-b5ad-45da-9017-e779dccf7c5e {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1913.820713] env[69027]: DEBUG nova.compute.manager [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Refreshing instance network info cache due to event network-changed-b14bbb55-b5ad-45da-9017-e779dccf7c5e. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1913.820893] env[69027]: DEBUG oslo_concurrency.lockutils [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] Acquiring lock "refresh_cache-ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.821042] env[69027]: DEBUG oslo_concurrency.lockutils [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] Acquired lock "refresh_cache-ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.821204] env[69027]: DEBUG nova.network.neutron [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Refreshing network info cache for port b14bbb55-b5ad-45da-9017-e779dccf7c5e {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1914.156344] env[69027]: DEBUG nova.network.neutron [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Updated VIF entry in instance network info cache for port b14bbb55-b5ad-45da-9017-e779dccf7c5e. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1914.156693] env[69027]: DEBUG nova.network.neutron [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Updating instance_info_cache with network_info: [{"id": "b14bbb55-b5ad-45da-9017-e779dccf7c5e", "address": "fa:16:3e:ff:fc:35", "network": {"id": "ef5d0002-4d0c-452c-8eaf-fcbb3db325fc", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-613479526-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4c558fa59cdb4454957a8e7792365a47", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0df968ae-c1ef-4009-a0f4-6f2e799c2fda", "external-id": "nsx-vlan-transportzone-864", "segmentation_id": 864, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb14bbb55-b5", "ovs_interfaceid": "b14bbb55-b5ad-45da-9017-e779dccf7c5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.167527] env[69027]: DEBUG oslo_concurrency.lockutils [req-74e7bba7-7b2d-4a50-b1c0-443032b3123b req-99648e8c-122f-4efb-8904-0f63794a5813 service nova] Releasing lock "refresh_cache-ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.243183] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395262, 'name': CreateVM_Task, 'duration_secs': 0.284976} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.243328] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1914.244014] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.244343] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.244520] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1914.244766] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68b3bb23-9f5b-499e-90df-429884fbbc52 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.249678] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 1914.249678] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52788e5b-cdd0-68dc-2fc0-4eed27fb7572" [ 1914.249678] env[69027]: _type = "Task" [ 1914.249678] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.259028] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52788e5b-cdd0-68dc-2fc0-4eed27fb7572, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.402876] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "22e27e0c-3cac-4794-b53a-4df7b8b92ec9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.403186] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "22e27e0c-3cac-4794-b53a-4df7b8b92ec9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.760477] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.760717] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1914.760926] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.772738] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1925.773171] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1925.773171] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1925.795068] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.795220] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.795349] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.795477] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.795600] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.795718] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.795838] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.795959] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.796091] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.796215] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1925.796335] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1929.771457] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1929.783244] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.783464] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.783628] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.783781] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1929.784920] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee408f34-48f7-44e1-b5fd-dff973794123 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.793813] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cdc72a-b9a8-4430-a900-f72a039673f3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.807294] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb343509-2ecf-4352-81b0-ce26887e6376 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.813313] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7a9e21-38c1-44b2-9900-16bd6b2c0067 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.842585] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180988MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1929.842708] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1929.842897] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.911433] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.911590] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.911714] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.911835] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.911952] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.912085] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.912201] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.912314] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.912430] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.912544] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1929.922574] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1929.922787] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1929.922934] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1930.043662] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23f5c64-9600-4f9a-8ce3-2394aed97ea3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.050685] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc30266-549c-4a7c-b048-71150c5d53f5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.079012] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771b93ba-5500-46c0-9e13-eecd52bedea8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.085521] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04194177-7573-4559-9a12-446d9a88eacf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.097830] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1930.106020] env[69027]: DEBUG nova.scheduler.client.report [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1930.119880] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1930.120096] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.277s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.116199] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1931.116578] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1931.116792] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1931.771178] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1932.771764] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1933.771752] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.772052] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1934.772052] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1959.696749] env[69027]: WARNING oslo_vmware.rw_handles [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1959.696749] env[69027]: ERROR oslo_vmware.rw_handles [ 1959.697525] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1959.700492] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1959.700492] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Copying Virtual Disk [datastore2] vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/09dc28c4-54b3-4dab-bf75-c0a75f71c5d9/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1959.700492] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-daa76af2-321f-4b3b-b48e-637eb10bac84 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.711074] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 
tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Waiting for the task: (returnval){ [ 1959.711074] env[69027]: value = "task-3395263" [ 1959.711074] env[69027]: _type = "Task" [ 1959.711074] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.719184] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Task: {'id': task-3395263, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.220259] env[69027]: DEBUG oslo_vmware.exceptions [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1960.220562] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.221142] env[69027]: ERROR nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1960.221142] env[69027]: Faults: ['InvalidArgument'] [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Traceback (most recent call last): [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] yield resources [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self.driver.spawn(context, instance, image_meta, [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self._fetch_image_if_missing(context, vi) [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] image_cache(vi, tmp_image_ds_loc) [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] vm_util.copy_virtual_disk( [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] session._wait_for_task(vmdk_copy_task) [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] return self.wait_for_task(task_ref) [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] return evt.wait() [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] result = hub.switch() [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] return self.greenlet.switch() [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self.f(*self.args, **self.kw) [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] raise exceptions.translate_fault(task_info.error) [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Faults: ['InvalidArgument'] [ 1960.221142] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] [ 1960.222386] env[69027]: INFO nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 
4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Terminating instance [ 1960.223649] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.223649] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1960.223649] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd12dcf1-6de7-4e12-b3b3-fad20e8e7788 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.225878] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1960.226080] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1960.226803] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5320827f-f005-4ec5-9244-6ebb91ca4c6b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.233130] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1960.233329] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-427f88d9-2c86-4bcd-a4da-d1f6d8832009 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.235335] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1960.235507] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1960.236431] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2da4eae-9659-4439-87f3-9dc20813a613 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.241160] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Waiting for the task: (returnval){ [ 1960.241160] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]523a2f95-4e30-5b58-492c-98d7454c7109" [ 1960.241160] env[69027]: _type = "Task" [ 1960.241160] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.250449] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]523a2f95-4e30-5b58-492c-98d7454c7109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.299921] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1960.301045] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1960.301045] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Deleting the datastore file [datastore2] 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1960.301045] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d08fc61-8e25-42dd-8f59-c4675e015daa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.306408] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Waiting for the task: (returnval){ [ 1960.306408] env[69027]: value = "task-3395265" [ 1960.306408] env[69027]: _type = "Task" [ 1960.306408] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.313847] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Task: {'id': task-3395265, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.751206] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1960.751494] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Creating directory with path [datastore2] vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1960.751708] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1046dcf-2b46-4fa5-818e-d6e45c3570eb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.762622] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Created directory with path [datastore2] vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1960.762816] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Fetch image to [datastore2] vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1960.762987] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1960.763716] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae4e8bc-65ed-441c-a82d-3bd637164a04 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.770088] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abb398a-8963-4ba4-8411-9c9e4915f8a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.778747] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be226da-cf17-4724-9133-3558297d9195 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.811860] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92de48c4-4f35-4b9e-8ee2-e237d048f830 {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.521339] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5659de63-f287-412e-afe6-a412a762dad7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.523391] env[69027]: DEBUG oslo_vmware.api [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Task: {'id': task-3395265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065521} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.523641] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1961.523822] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1961.523988] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1961.524181] env[69027]: INFO nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Took 1.30 seconds to destroy the instance on the hypervisor. 
[ 1961.526244] env[69027]: DEBUG nova.compute.claims [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1961.526410] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.526643] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.545930] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1961.599202] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1961.660391] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1961.660612] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1961.745324] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2563f574-bbb0-404e-af16-8a87d81d8c77 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.752514] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff56522-0253-4bb9-b696-591817d24f31 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.781070] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac6c7bd-efaf-4f77-b26f-1bfb935029f2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.787724] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c375645d-2c88-41d0-a37b-1e2c684e0776 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.800135] env[69027]: DEBUG nova.compute.provider_tree [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.808487] env[69027]: DEBUG nova.scheduler.client.report [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1961.821295] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.295s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.821827] env[69027]: ERROR nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1961.821827] env[69027]: Faults: ['InvalidArgument'] [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Traceback (most recent call last): [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/compute/manager.py", line 
2632, in _build_and_run_instance [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self.driver.spawn(context, instance, image_meta, [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self._fetch_image_if_missing(context, vi) [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] image_cache(vi, tmp_image_ds_loc) [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] vm_util.copy_virtual_disk( [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] session._wait_for_task(vmdk_copy_task) [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] return self.wait_for_task(task_ref) [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] return evt.wait() [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] result = hub.switch() [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] return self.greenlet.switch() [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] self.f(*self.args, **self.kw) [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 
4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] raise exceptions.translate_fault(task_info.error) [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Faults: ['InvalidArgument'] [ 1961.821827] env[69027]: ERROR nova.compute.manager [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] [ 1961.822978] env[69027]: DEBUG nova.compute.utils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1961.823903] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Build of instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 was re-scheduled: A specified parameter was not correct: fileType [ 1961.823903] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1961.824301] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1961.824502] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1961.824667] env[69027]: DEBUG nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1961.824827] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1962.167082] env[69027]: DEBUG nova.network.neutron [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.178986] env[69027]: INFO nova.compute.manager [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Took 0.35 seconds to deallocate network for instance. [ 1962.283018] env[69027]: INFO nova.scheduler.client.report [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Deleted allocations for instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 [ 1962.302692] env[69027]: DEBUG oslo_concurrency.lockutils [None req-73532641-e23d-4e5f-a929-355a91bcd178 tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 575.932s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.303879] env[69027]: DEBUG oslo_concurrency.lockutils [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 379.158s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.303879] env[69027]: DEBUG oslo_concurrency.lockutils [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Acquiring lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.304094] env[69027]: DEBUG oslo_concurrency.lockutils [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] 
Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.304165] env[69027]: DEBUG oslo_concurrency.lockutils [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.306341] env[69027]: INFO nova.compute.manager [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Terminating instance [ 1962.308073] env[69027]: DEBUG nova.compute.manager [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1962.308269] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1962.308733] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f0194ff-e075-4c55-9535-bfba133a53b5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.313846] env[69027]: DEBUG nova.compute.manager [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1962.320232] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5471576e-b0f8-4b66-b967-9dfb5d7a50b1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.348418] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77 could not be found. 
[ 1962.348610] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1962.348807] env[69027]: INFO nova.compute.manager [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1962.349107] env[69027]: DEBUG oslo.service.loopingcall [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1962.351313] env[69027]: DEBUG nova.compute.manager [-] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1962.351413] env[69027]: DEBUG nova.network.neutron [-] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1962.364821] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.365070] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.366537] env[69027]: INFO nova.compute.claims [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1962.376901] env[69027]: DEBUG nova.network.neutron [-] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.386352] env[69027]: INFO nova.compute.manager [-] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] Took 0.03 seconds to deallocate network for instance. 
[ 1962.469047] env[69027]: DEBUG oslo_concurrency.lockutils [None req-18130c2c-4951-4daa-919c-d2a4a92d1ece tempest-ServerAddressesNegativeTestJSON-905200605 tempest-ServerAddressesNegativeTestJSON-905200605-project-member] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.469919] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 217.002s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.470126] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77] During sync_power_state the instance has a pending task (deleting). Skip. [ 1962.470303] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "4a9a9e2d-c5f8-4cfe-86a3-61a38f950b77" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.539445] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03391893-7dc4-40c8-a090-602824d233f0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.547091] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7c354e-5b72-4920-9189-631eb0823a10 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.576783] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b8c341-9862-4802-9168-e421d3d8301e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.583833] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665d00a6-ff24-4383-8898-76e21c19b5b0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.596324] env[69027]: DEBUG nova.compute.provider_tree [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1962.604854] env[69027]: DEBUG nova.scheduler.client.report [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1962.619245] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.254s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.619675] env[69027]: DEBUG nova.compute.manager [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1962.650236] env[69027]: DEBUG nova.compute.utils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1962.651589] env[69027]: DEBUG nova.compute.manager [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1962.651761] env[69027]: DEBUG nova.network.neutron [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1962.662216] env[69027]: DEBUG nova.compute.manager [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1962.712933] env[69027]: DEBUG nova.policy [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9cae5394177466e9afb1f8fa26e15ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ed36a72c2994c47a7313f7bbb37640a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 1962.730243] env[69027]: DEBUG nova.compute.manager [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1962.759677] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1962.760035] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1962.760088] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1962.760271] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1962.760442] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1962.760605] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1962.760827] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1962.760993] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1962.761176] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 
tempest-ServersTestJSON-2138845674-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1962.761340] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1962.761538] env[69027]: DEBUG nova.virt.hardware [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1962.762520] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac15368d-cfb3-413c-86d6-c5df45eab4e3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.771098] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db8ccef-30ff-479f-8da1-3d671a149e9c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.022737] env[69027]: DEBUG nova.network.neutron [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Successfully created port: 69309ee1-d0d9-47b0-9409-7c81a2a5a11b {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1963.236252] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "e379ca79-9458-464d-b07f-f651e474ebd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.622299] env[69027]: DEBUG nova.network.neutron [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Successfully updated port: 69309ee1-d0d9-47b0-9409-7c81a2a5a11b {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1963.636623] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "refresh_cache-22e27e0c-3cac-4794-b53a-4df7b8b92ec9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.636783] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "refresh_cache-22e27e0c-3cac-4794-b53a-4df7b8b92ec9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.636950] env[69027]: DEBUG nova.network.neutron [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 
22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1963.678247] env[69027]: DEBUG nova.network.neutron [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1963.869864] env[69027]: DEBUG nova.network.neutron [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Updating instance_info_cache with network_info: [{"id": "69309ee1-d0d9-47b0-9409-7c81a2a5a11b", "address": "fa:16:3e:f2:e8:a9", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69309ee1-d0", "ovs_interfaceid": "69309ee1-d0d9-47b0-9409-7c81a2a5a11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.880823] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "refresh_cache-22e27e0c-3cac-4794-b53a-4df7b8b92ec9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.881132] env[69027]: DEBUG nova.compute.manager [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Instance network_info: |[{"id": "69309ee1-d0d9-47b0-9409-7c81a2a5a11b", "address": "fa:16:3e:f2:e8:a9", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": 
"nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69309ee1-d0", "ovs_interfaceid": "69309ee1-d0d9-47b0-9409-7c81a2a5a11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1963.881553] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:e8:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69309ee1-d0d9-47b0-9409-7c81a2a5a11b', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1963.888936] env[69027]: DEBUG oslo.service.loopingcall [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1963.889452] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1963.889688] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bbcc6d15-41c5-4abf-8973-81dc76eaed64 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.909780] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1963.909780] env[69027]: value = "task-3395266" [ 1963.909780] env[69027]: _type = "Task" [ 1963.909780] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.918562] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395266, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.398294] env[69027]: DEBUG nova.compute.manager [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Received event network-vif-plugged-69309ee1-d0d9-47b0-9409-7c81a2a5a11b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1964.398294] env[69027]: DEBUG oslo_concurrency.lockutils [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] Acquiring lock "22e27e0c-3cac-4794-b53a-4df7b8b92ec9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.398294] env[69027]: DEBUG oslo_concurrency.lockutils [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] Lock "22e27e0c-3cac-4794-b53a-4df7b8b92ec9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.398294] env[69027]: DEBUG oslo_concurrency.lockutils [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] Lock "22e27e0c-3cac-4794-b53a-4df7b8b92ec9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.398294] env[69027]: DEBUG nova.compute.manager [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] No waiting events found dispatching network-vif-plugged-69309ee1-d0d9-47b0-9409-7c81a2a5a11b {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1964.398294] env[69027]: WARNING nova.compute.manager [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Received unexpected event network-vif-plugged-69309ee1-d0d9-47b0-9409-7c81a2a5a11b for instance with vm_state building and task_state spawning. [ 1964.398552] env[69027]: DEBUG nova.compute.manager [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Received event network-changed-69309ee1-d0d9-47b0-9409-7c81a2a5a11b {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1964.398620] env[69027]: DEBUG nova.compute.manager [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Refreshing instance network info cache due to event network-changed-69309ee1-d0d9-47b0-9409-7c81a2a5a11b. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1964.398804] env[69027]: DEBUG oslo_concurrency.lockutils [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] Acquiring lock "refresh_cache-22e27e0c-3cac-4794-b53a-4df7b8b92ec9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.398940] env[69027]: DEBUG oslo_concurrency.lockutils [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] Acquired lock "refresh_cache-22e27e0c-3cac-4794-b53a-4df7b8b92ec9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.399106] env[69027]: DEBUG nova.network.neutron [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Refreshing network info cache for port 69309ee1-d0d9-47b0-9409-7c81a2a5a11b {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1964.420101] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395266, 'name': CreateVM_Task, 'duration_secs': 0.313401} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.420250] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1964.420893] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.421067] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.421378] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1964.421613] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cf94c59-76d1-4042-a8fe-9e25612f3764 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.425740] env[69027]: DEBUG oslo_vmware.api [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 1964.425740] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52b932d8-c957-c5f7-678f-0c6a1c393346" [ 1964.425740] env[69027]: _type = "Task" [ 1964.425740] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.433055] env[69027]: DEBUG oslo_vmware.api [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52b932d8-c957-c5f7-678f-0c6a1c393346, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.637806] env[69027]: DEBUG nova.network.neutron [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Updated VIF entry in instance network info cache for port 69309ee1-d0d9-47b0-9409-7c81a2a5a11b. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1964.638180] env[69027]: DEBUG nova.network.neutron [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Updating instance_info_cache with network_info: [{"id": "69309ee1-d0d9-47b0-9409-7c81a2a5a11b", "address": "fa:16:3e:f2:e8:a9", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69309ee1-d0", "ovs_interfaceid": "69309ee1-d0d9-47b0-9409-7c81a2a5a11b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1964.647436] env[69027]: DEBUG oslo_concurrency.lockutils [req-25f6d81d-7f4c-4cc0-9393-de84dd7e4549 req-1e5ce1fd-2dc0-4bed-bcbf-0ffad8272934 service nova] Releasing lock "refresh_cache-22e27e0c-3cac-4794-b53a-4df7b8b92ec9" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.935754] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.936157] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1964.936262] 
env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1986.772479] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1986.772761] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1986.772790] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1986.795417] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.795582] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.795714] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.795847] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.795970] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.796102] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.796227] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.796345] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.796463] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.796579] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1986.796696] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1990.771409] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1991.772403] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1991.784031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.784031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.784031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.784214] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1991.785319] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4792c1-ab1b-4724-9feb-2dedc703b652 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.795132] env[69027]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e38e6cf-e294-4abd-a516-9c237b924206 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.808470] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16aea646-f181-4bf9-89a7-ed4ebd405d10 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.814418] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cd5a61-a13b-44f2-8d31-92ffa107545e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.842225] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180991MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1991.842361] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.842544] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.912681] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.912840] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.912971] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913108] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913228] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913344] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913460] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913575] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913691] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913806] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1991.913990] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1991.914137] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1992.030079] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975356a6-b8bc-4f40-8d4f-99ce995021c8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.037558] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25f803d-f766-4122-b3ec-93d2a0551d6a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.067017] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b367e124-8568-447d-b5d9-aa3e24ac55e9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.073648] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec60905-8565-479d-a791-aa7fd69b5919 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.085931] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1992.093946] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1992.107597] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1992.107773] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.265s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.264644] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.103559] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.103822] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.103987] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.771324] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.767807] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.771510] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.771868] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1995.771986] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2009.460674] env[69027]: WARNING oslo_vmware.rw_handles [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2009.460674] env[69027]: ERROR oslo_vmware.rw_handles [ 2009.461416] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2009.463269] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2009.463561] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Copying Virtual Disk [datastore2] vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/b5cff130-03d5-4e0e-805a-4af1b11428f3/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2009.463891] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02a88288-3a9f-47e3-9e67-21194b0b2afe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.472135] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Waiting for the task: (returnval){ [ 2009.472135] env[69027]: value = "task-3395267" [ 
2009.472135] env[69027]: _type = "Task" [ 2009.472135] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.480382] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Task: {'id': task-3395267, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.982209] env[69027]: DEBUG oslo_vmware.exceptions [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2009.982538] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.983135] env[69027]: ERROR nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2009.983135] env[69027]: Faults: ['InvalidArgument'] [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Traceback (most recent call last): [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] yield resources [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self.driver.spawn(context, instance, image_meta, [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self._fetch_image_if_missing(context, vi) [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] image_cache(vi, tmp_image_ds_loc) [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: 
d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] vm_util.copy_virtual_disk( [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] session._wait_for_task(vmdk_copy_task) [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] return self.wait_for_task(task_ref) [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] return evt.wait() [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] result = hub.switch() [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] return self.greenlet.switch() [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self.f(*self.args, **self.kw) [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] raise exceptions.translate_fault(task_info.error) [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Faults: ['InvalidArgument'] [ 2009.983135] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] [ 2009.983826] env[69027]: INFO nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Terminating instance [ 2009.985332] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.985332] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2009.985509] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43d7b9b0-5388-46a3-ad01-0452ab9267c4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.987631] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2009.987820] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2009.988546] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11938b88-d188-402f-bb18-0f01072009cc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.996372] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2009.996580] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9f429f7-18da-4556-aad9-300e90f124c0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.998665] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2009.998801] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2009.999703] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22fc8f94-0fe7-478b-99b9-bd7745444b87 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.004268] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 2010.004268] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52edecc9-8991-0841-347e-0a6731eae152" [ 2010.004268] env[69027]: _type = "Task" [ 2010.004268] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.014447] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52edecc9-8991-0841-347e-0a6731eae152, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.069059] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2010.069059] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2010.069059] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Deleting the datastore file [datastore2] d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2010.069059] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7858349-c146-4546-a8b8-5d1fe38b11a5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.073821] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Waiting for the task: (returnval){ [ 2010.073821] env[69027]: value = "task-3395269" [ 2010.073821] env[69027]: _type = "Task" [ 2010.073821] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.082053] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Task: {'id': task-3395269, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.515069] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2010.515069] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating directory with path [datastore2] vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2010.515069] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-541ced35-1a46-464e-ba31-637b663595d5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.526302] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Created directory with path [datastore2] vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2010.526509] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Fetch image to [datastore2] vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2010.526679] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2010.527449] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f018c7f3-72fa-48da-b12d-77c6c3ecf778 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.534351] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e84fb2d-c5fd-466f-9b4c-f1937139807f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.543297] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2da4a7-c723-4034-a13c-bd25488845ab {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.573437] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5a95b1-40ee-480d-a08c-631f87dc2436 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.583885] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ed6184a6-0242-4da2-bcd0-effb6e234ec7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.585478] env[69027]: DEBUG oslo_vmware.api [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Task: {'id': task-3395269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064522} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.585713] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2010.585895] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2010.586148] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2010.586258] env[69027]: INFO nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2010.588384] env[69027]: DEBUG nova.compute.claims [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2010.588586] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.588803] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.606464] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2010.658523] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2010.719531] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2010.719712] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2010.809826] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17282531-f9aa-48cc-85f6-c934f0c4caf0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.817593] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37caf6ab-aeb0-415e-92e9-1ea31cda3f9a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.846402] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625b88e2-0c7d-4597-bc0b-5d066c2af617 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.853400] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f7003e-3c08-4298-9587-f5896c792ad1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.865997] env[69027]: DEBUG nova.compute.provider_tree [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2010.874141] env[69027]: DEBUG nova.scheduler.client.report [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2010.887957] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.299s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.888499] env[69027]: ERROR nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2010.888499] env[69027]: Faults: ['InvalidArgument'] [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Traceback (most recent call last): [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: 
d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self.driver.spawn(context, instance, image_meta, [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self._fetch_image_if_missing(context, vi) [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] image_cache(vi, tmp_image_ds_loc) [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] vm_util.copy_virtual_disk( [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] session._wait_for_task(vmdk_copy_task) [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] return self.wait_for_task(task_ref) [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] return evt.wait() [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] result = hub.switch() [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] return self.greenlet.switch() [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] self.f(*self.args, **self.kw) [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] raise exceptions.translate_fault(task_info.error) [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Faults: ['InvalidArgument'] [ 2010.888499] env[69027]: ERROR nova.compute.manager [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] [ 2010.889275] env[69027]: DEBUG nova.compute.utils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2010.890613] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Build of instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 was re-scheduled: A specified parameter was not correct: fileType [ 2010.890613] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2010.891017] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2010.891198] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2010.891375] env[69027]: DEBUG nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2010.891541] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2011.287964] env[69027]: DEBUG nova.network.neutron [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.300085] env[69027]: INFO nova.compute.manager [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Took 0.41 seconds to deallocate network for instance. [ 2011.399048] env[69027]: INFO nova.scheduler.client.report [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Deleted allocations for instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 [ 2011.419134] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c614a1ea-37f1-4bd3-9b20-d744e2d2d814 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.657s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.420661] env[69027]: DEBUG oslo_concurrency.lockutils [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 420.209s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.420661] env[69027]: DEBUG oslo_concurrency.lockutils [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Acquiring lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.420661] env[69027]: DEBUG oslo_concurrency.lockutils [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2011.420661] env[69027]: DEBUG oslo_concurrency.lockutils [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.422031] env[69027]: INFO nova.compute.manager [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Terminating instance [ 2011.423702] env[69027]: DEBUG nova.compute.manager [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2011.423893] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2011.424379] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb21d238-d7c4-4108-adca-c01ae3a6a675 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.434728] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a86b199-4741-4091-86c4-a497a8c3ce84 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.462510] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d3b9542c-3bad-4ea5-86c4-5254dc7a3b28 could not be found. [ 2011.462704] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2011.462891] env[69027]: INFO nova.compute.manager [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2011.463171] env[69027]: DEBUG oslo.service.loopingcall [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2011.463399] env[69027]: DEBUG nova.compute.manager [-] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2011.463493] env[69027]: DEBUG nova.network.neutron [-] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2011.493228] env[69027]: DEBUG nova.network.neutron [-] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.501243] env[69027]: INFO nova.compute.manager [-] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] Took 0.04 seconds to deallocate network for instance. [ 2011.593913] env[69027]: DEBUG oslo_concurrency.lockutils [None req-67508683-b4f1-4792-8ffc-77cc482ac298 tempest-ServersTestJSON-2145480274 tempest-ServersTestJSON-2145480274-project-member] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.594692] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 266.127s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.594888] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: d3b9542c-3bad-4ea5-86c4-5254dc7a3b28] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2011.596025] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "d3b9542c-3bad-4ea5-86c4-5254dc7a3b28" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.683401] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquiring lock "c78bd74b-1d1b-46bc-9fd8-a553f23e6671" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.683699] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Lock "c78bd74b-1d1b-46bc-9fd8-a553f23e6671" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.699981] env[69027]: DEBUG nova.compute.manager [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Starting instance... {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2031.748170] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.748428] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.749950] env[69027]: INFO nova.compute.claims [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2031.908553] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a1fb80-dae8-4268-819a-7ccf78ef9cd6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.916129] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835e57b7-48d8-4579-9b17-fa1f4cbb42fe {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.946390] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0351ef9f-db35-4225-b79d-382480948d6a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.953928] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf2927c-e87b-459b-a2f7-185c71f06bfd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.966869] env[69027]: DEBUG nova.compute.provider_tree [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2031.975574] env[69027]: DEBUG nova.scheduler.client.report [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2031.989180] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.241s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.989669] env[69027]: DEBUG nova.compute.manager [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2032.033331] env[69027]: DEBUG nova.compute.utils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2032.034666] env[69027]: DEBUG nova.compute.manager [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Allocating IP information in the background. 
{{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2032.034841] env[69027]: DEBUG nova.network.neutron [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2032.046036] env[69027]: DEBUG nova.compute.manager [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2032.102685] env[69027]: DEBUG nova.policy [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96e5fe634520480e81e948bf54587ab3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f76faea002341ada326856e96a8d493', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 2032.134166] env[69027]: DEBUG nova.compute.manager [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2032.162985] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2032.163254] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2032.163436] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2032.163691] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2032.163861] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2032.164055] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2032.164295] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2032.164460] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2032.164628] env[69027]: DEBUG 
nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2032.164788] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2032.164959] env[69027]: DEBUG nova.virt.hardware [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2032.165814] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e61dd4-21c3-4e65-a83b-91ff84b58205 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.174297] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73addfa-9c03-47f5-bf88-aa97792f610e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.703718] env[69027]: DEBUG nova.network.neutron [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Successfully created port: 146cb1d0-2df1-4fd2-becd-63352347aae7 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2033.257102] env[69027]: DEBUG nova.compute.manager [req-99be1e8f-a72c-4b61-bd0c-3bcb8bedf279 req-e433101f-f157-4813-a605-cd06b23f6272 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Received event network-vif-plugged-146cb1d0-2df1-4fd2-becd-63352347aae7 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2033.257309] env[69027]: DEBUG oslo_concurrency.lockutils [req-99be1e8f-a72c-4b61-bd0c-3bcb8bedf279 req-e433101f-f157-4813-a605-cd06b23f6272 service nova] Acquiring lock "c78bd74b-1d1b-46bc-9fd8-a553f23e6671-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.257529] env[69027]: DEBUG oslo_concurrency.lockutils [req-99be1e8f-a72c-4b61-bd0c-3bcb8bedf279 req-e433101f-f157-4813-a605-cd06b23f6272 service nova] Lock "c78bd74b-1d1b-46bc-9fd8-a553f23e6671-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.257707] env[69027]: DEBUG oslo_concurrency.lockutils [req-99be1e8f-a72c-4b61-bd0c-3bcb8bedf279 req-e433101f-f157-4813-a605-cd06b23f6272 service nova] Lock "c78bd74b-1d1b-46bc-9fd8-a553f23e6671-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.257880] env[69027]: DEBUG 
nova.compute.manager [req-99be1e8f-a72c-4b61-bd0c-3bcb8bedf279 req-e433101f-f157-4813-a605-cd06b23f6272 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] No waiting events found dispatching network-vif-plugged-146cb1d0-2df1-4fd2-becd-63352347aae7 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2033.258055] env[69027]: WARNING nova.compute.manager [req-99be1e8f-a72c-4b61-bd0c-3bcb8bedf279 req-e433101f-f157-4813-a605-cd06b23f6272 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Received unexpected event network-vif-plugged-146cb1d0-2df1-4fd2-becd-63352347aae7 for instance with vm_state building and task_state spawning. [ 2033.347757] env[69027]: DEBUG nova.network.neutron [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Successfully updated port: 146cb1d0-2df1-4fd2-becd-63352347aae7 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2033.362854] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquiring lock "refresh_cache-c78bd74b-1d1b-46bc-9fd8-a553f23e6671" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.363061] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquired lock "refresh_cache-c78bd74b-1d1b-46bc-9fd8-a553f23e6671" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.363301] env[69027]: DEBUG nova.network.neutron [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2033.402658] env[69027]: DEBUG nova.network.neutron [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2033.567611] env[69027]: DEBUG nova.network.neutron [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Updating instance_info_cache with network_info: [{"id": "146cb1d0-2df1-4fd2-becd-63352347aae7", "address": "fa:16:3e:1e:32:b9", "network": {"id": "c7c3452c-7376-402a-b3c7-e6c74acd3ed3", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1896608645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f76faea002341ada326856e96a8d493", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap146cb1d0-2d", "ovs_interfaceid": "146cb1d0-2df1-4fd2-becd-63352347aae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.580044] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Releasing lock "refresh_cache-c78bd74b-1d1b-46bc-9fd8-a553f23e6671" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.580323] env[69027]: DEBUG nova.compute.manager [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Instance network_info: |[{"id": "146cb1d0-2df1-4fd2-becd-63352347aae7", "address": "fa:16:3e:1e:32:b9", "network": {"id": "c7c3452c-7376-402a-b3c7-e6c74acd3ed3", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1896608645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f76faea002341ada326856e96a8d493", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap146cb1d0-2d", "ovs_interfaceid": "146cb1d0-2df1-4fd2-becd-63352347aae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2033.580729] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:32:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '146cb1d0-2df1-4fd2-becd-63352347aae7', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2033.588199] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Creating folder: Project (9f76faea002341ada326856e96a8d493). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2033.588674] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aca23515-bc9e-4796-8eb5-eac64f83ad50 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.600520] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Created folder: Project (9f76faea002341ada326856e96a8d493) in parent group-v677321. [ 2033.600704] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Creating folder: Instances. Parent ref: group-v677426. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2033.600919] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d274768-aec5-4a87-b82a-edaef1827236 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.609556] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Created folder: Instances in parent group-v677426. [ 2033.609774] env[69027]: DEBUG oslo.service.loopingcall [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2033.609941] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2033.610143] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b01834bc-7e68-4226-a765-543b6ff0da3f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.629038] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2033.629038] env[69027]: value = "task-3395272" [ 2033.629038] env[69027]: _type = "Task" [ 2033.629038] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.635747] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395272, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.138475] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395272, 'name': CreateVM_Task, 'duration_secs': 0.27894} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.138774] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2034.139313] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.139477] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.140113] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2034.140113] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eefe9989-c575-4944-89e4-c1f7ae3ae09d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.144058] env[69027]: DEBUG oslo_vmware.api [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Waiting for the task: (returnval){ [ 2034.144058] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5274d39c-d00a-b8c2-61f0-a207f5734f3c" [ 2034.144058] env[69027]: _type = "Task" [ 2034.144058] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.155856] env[69027]: DEBUG oslo_vmware.api [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5274d39c-d00a-b8c2-61f0-a207f5734f3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.654412] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.654692] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2034.654904] env[69027]: DEBUG oslo_concurrency.lockutils [None req-74f52f0b-ff52-463a-80cf-888ba0f29ede tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.288218] env[69027]: DEBUG nova.compute.manager [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Received event network-changed-146cb1d0-2df1-4fd2-becd-63352347aae7 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2035.288459] env[69027]: DEBUG nova.compute.manager [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Refreshing instance network info cache due to event network-changed-146cb1d0-2df1-4fd2-becd-63352347aae7. 
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2035.288629] env[69027]: DEBUG oslo_concurrency.lockutils [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] Acquiring lock "refresh_cache-c78bd74b-1d1b-46bc-9fd8-a553f23e6671" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.288776] env[69027]: DEBUG oslo_concurrency.lockutils [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] Acquired lock "refresh_cache-c78bd74b-1d1b-46bc-9fd8-a553f23e6671" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.288935] env[69027]: DEBUG nova.network.neutron [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Refreshing network info cache for port 146cb1d0-2df1-4fd2-becd-63352347aae7 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2035.577198] env[69027]: DEBUG nova.network.neutron [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Updated VIF entry in instance network info cache for port 146cb1d0-2df1-4fd2-becd-63352347aae7. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2035.577575] env[69027]: DEBUG nova.network.neutron [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Updating instance_info_cache with network_info: [{"id": "146cb1d0-2df1-4fd2-becd-63352347aae7", "address": "fa:16:3e:1e:32:b9", "network": {"id": "c7c3452c-7376-402a-b3c7-e6c74acd3ed3", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1896608645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f76faea002341ada326856e96a8d493", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap146cb1d0-2d", "ovs_interfaceid": "146cb1d0-2df1-4fd2-becd-63352347aae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.587095] env[69027]: DEBUG oslo_concurrency.lockutils [req-eaff3528-2e26-445c-b0e7-a30aeaccd0f5 req-8a423b3c-1b1b-4f51-9a05-cfda71c9d678 service nova] Releasing lock "refresh_cache-c78bd74b-1d1b-46bc-9fd8-a553f23e6671" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.919838] env[69027]: DEBUG oslo_concurrency.lockutils [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] 
Acquiring lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.273729] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.772167] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.772428] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2046.772506] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2046.792579] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.792732] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.792856] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.792986] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.793128] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.793249] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.793370] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.793493] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.793650] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.793778] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2046.793898] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2050.771701] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.767513] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.771072] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2052.782564] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.782774] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.782946] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.783113] 
env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2052.784383] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76b445a-f8ea-4c44-8fea-fee6623e6f9d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.793153] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbeeb377-5b82-4c53-81e0-1400ce01e7f6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.806687] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fff63c5-b8d9-4b0c-855e-1ffb850d0d4e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.812703] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732845b9-39c4-4d8b-8130-0530ec0e6f2d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.841877] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180934MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2052.842040] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.842237] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.940589] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.940753] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.940878] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.940998] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.941132] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.941307] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.941428] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.941546] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.941654] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.941768] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c78bd74b-1d1b-46bc-9fd8-a553f23e6671 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2052.941965] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2052.942123] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2053.060433] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe1ef19-0daa-4184-a521-344802c88d2a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.068156] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa4fc9c-b679-40f4-9170-6cbd6b1ba727 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.096795] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78454cf-bfa4-42a6-b06d-84a2d106e80d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.103402] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332759aa-3b29-4022-89c0-0aab2012fb5e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.115720] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2053.125419] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2053.139461] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2053.139605] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.297s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.140582] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.771179] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2054.771484] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.771851] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2056.797656] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2056.798079] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2056.798312] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2059.478050] env[69027]: WARNING oslo_vmware.rw_handles [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2059.478050] env[69027]: ERROR oslo_vmware.rw_handles [ 2059.478050] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2059.479763] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2059.480040] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Copying Virtual Disk [datastore2] vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/648b2d8a-ddaa-45ce-897f-226fc1c3e753/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2059.480337] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4afdc469-cbd3-4a07-a7a3-12ad5131a1a3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.489288] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 
2059.489288] env[69027]: value = "task-3395273" [ 2059.489288] env[69027]: _type = "Task" [ 2059.489288] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.496617] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': task-3395273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.999286] env[69027]: DEBUG oslo_vmware.exceptions [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2059.999561] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.000129] env[69027]: ERROR nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2060.000129] env[69027]: Faults: ['InvalidArgument'] [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Traceback (most recent call last): [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] yield resources [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self.driver.spawn(context, instance, image_meta, [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self._fetch_image_if_missing(context, vi) [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] 
image_cache(vi, tmp_image_ds_loc) [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] vm_util.copy_virtual_disk( [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] session._wait_for_task(vmdk_copy_task) [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] return self.wait_for_task(task_ref) [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] return evt.wait() [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] result = hub.switch() [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] return self.greenlet.switch() [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self.f(*self.args, **self.kw) [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] raise exceptions.translate_fault(task_info.error) [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Faults: ['InvalidArgument'] [ 2060.000129] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] [ 2060.000918] env[69027]: INFO nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Terminating instance [ 2060.002058] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 
tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2060.002275] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2060.002520] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3680764c-83f3-4441-8cb2-c1671eefba2c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.004660] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2060.004858] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2060.005568] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9baace1-ec64-430f-a9a4-b9d63f6a2d6d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.012317] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2060.012521] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d0965771-c310-4aaa-a36d-2761c64d43bb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.014675] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2060.014842] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2060.015753] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcb7c87a-731c-4b77-a01d-b7d377b8c9ff {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.020175] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 2060.020175] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52cb4efc-1325-e343-d1af-05921a176d4e" [ 2060.020175] env[69027]: _type = "Task" [ 2060.020175] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.029845] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52cb4efc-1325-e343-d1af-05921a176d4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.086131] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2060.087086] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2060.087086] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Deleting the datastore file [datastore2] 1d3442ae-f46f-433d-bccb-f323463e3a21 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2060.087086] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92cd5bf0-09a7-4716-927c-4f1195ed0a98 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.092728] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 2060.092728] env[69027]: value = "task-3395275" [ 2060.092728] env[69027]: _type = "Task" [ 2060.092728] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.100677] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': task-3395275, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.531035] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2060.531358] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating directory with path [datastore2] vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2060.531496] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-725e46be-9a97-42e3-8fff-e465201a9456 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.543533] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Created directory with path [datastore2] vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2060.543734] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Fetch image to [datastore2] vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2060.543911] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2060.544632] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb85dfe-94f1-4222-a5e6-f1f35c00356e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.550807] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f63e01-2e55-4735-a7d3-a7dda5718233 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.559616] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6993cd79-d3d6-457a-812c-a3608c194216 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.590192] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8272841c-a970-4f40-926f-a30a7321f664 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.597982] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a33c89c2-f57c-4108-9b24-27a9d0bc5777 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.602040] env[69027]: DEBUG oslo_vmware.api [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': task-3395275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07704} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.602555] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2060.602744] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2060.602916] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2060.603102] env[69027]: INFO nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Took 0.60 seconds to destroy the instance on the hypervisor. 
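Editor's note: the MakeDirectory / AcquireGenericServiceTicket calls above and the oslo_vmware.rw_handles entries just below record the image-cache fetch for instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9: Nova creates the cache directory on datastore2 and then streams the 21318656-byte Glance image through an HTTP write handle to the datastore's /folder URL (https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/...?dcPath=ha-datacenter&dsName=datastore2). The snippet below is only a rough, hedged sketch of that write-handle step, assuming a plain HTTP PUT with caller-supplied authentication cookies; it is not the oslo_vmware.rw_handles implementation, and the auth details are an assumption.

    # Illustrative sketch only -- NOT the oslo_vmware.rw_handles code.
    # Assumption: the datastore folder endpoint accepts an HTTP PUT and the
    # caller supplies whatever cookie/ticket material the vSphere session provides.
    import requests

    def upload_to_datastore(host, ds_name, dc_path, rel_path, data,
                            cookies=None, verify=True):
        """Stream image bytes to [ds_name] rel_path via the host's /folder URL."""
        url = f"https://{host}:443/folder/{rel_path}"
        params = {"dcPath": dc_path, "dsName": ds_name}
        headers = {"Content-Type": "application/octet-stream"}
        # 'data' may be bytes or a file-like object (like the image stream Nova
        # reads from Glance before closing the write handle, as logged below).
        resp = requests.put(url, params=params, data=data, headers=headers,
                            cookies=cookies, verify=verify, timeout=300)
        resp.raise_for_status()
        return resp.status_code

With the values from this log the call would look like upload_to_datastore("esx7c1n3.openstack.eu-de-1.cloud.sap", "datastore2", "ha-datacenter", "vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk", image_stream, cookies=session_cookies); the hostname, path and size are taken from the entries here, everything else is illustrative.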
[ 2060.605156] env[69027]: DEBUG nova.compute.claims [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2060.605329] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.605545] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.620849] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2060.676676] env[69027]: DEBUG oslo_vmware.rw_handles [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2060.736191] env[69027]: DEBUG oslo_vmware.rw_handles [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2060.737032] env[69027]: DEBUG oslo_vmware.rw_handles [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2060.821857] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd0ee58-de4e-4ea9-adf2-d5663a28447e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.829439] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b262debc-883e-441f-b83b-172bcd9bffc6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.858741] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c673c14c-2e88-4206-a4d2-b40b47acc321 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.865351] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1455b3af-b6dc-4f71-941a-e6bae991403a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.877756] env[69027]: DEBUG nova.compute.provider_tree [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2060.886145] env[69027]: DEBUG nova.scheduler.client.report [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2060.902122] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.902646] env[69027]: ERROR nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2060.902646] env[69027]: Faults: ['InvalidArgument'] [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Traceback (most recent call last): [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2060.902646] env[69027]: ERROR 
nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self.driver.spawn(context, instance, image_meta, [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self._fetch_image_if_missing(context, vi) [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] image_cache(vi, tmp_image_ds_loc) [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] vm_util.copy_virtual_disk( [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] session._wait_for_task(vmdk_copy_task) [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] return self.wait_for_task(task_ref) [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] return evt.wait() [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] result = hub.switch() [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] return self.greenlet.switch() [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] self.f(*self.args, **self.kw) [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] raise exceptions.translate_fault(task_info.error) [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Faults: ['InvalidArgument'] [ 2060.902646] env[69027]: ERROR nova.compute.manager [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] [ 2060.903522] env[69027]: DEBUG nova.compute.utils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2060.904730] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Build of instance 1d3442ae-f46f-433d-bccb-f323463e3a21 was re-scheduled: A specified parameter was not correct: fileType [ 2060.904730] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2060.905148] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2060.905326] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2060.905500] env[69027]: DEBUG nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2060.905665] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2061.167254] env[69027]: DEBUG nova.network.neutron [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.180059] env[69027]: INFO nova.compute.manager [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Took 0.27 seconds to deallocate network for instance. [ 2061.272183] env[69027]: INFO nova.scheduler.client.report [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Deleted allocations for instance 1d3442ae-f46f-433d-bccb-f323463e3a21 [ 2061.306026] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c63b5c94-91be-43ad-87dc-1a2454c74519 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 611.873s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.307289] env[69027]: DEBUG oslo_concurrency.lockutils [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 415.614s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.307575] env[69027]: DEBUG oslo_concurrency.lockutils [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "1d3442ae-f46f-433d-bccb-f323463e3a21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.307790] env[69027]: DEBUG oslo_concurrency.lockutils [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.307962] env[69027]: DEBUG oslo_concurrency.lockutils [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.311053] env[69027]: INFO nova.compute.manager [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Terminating instance [ 2061.312932] env[69027]: DEBUG nova.compute.manager [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2061.314023] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2061.314023] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-299c8b08-d9e3-4f96-9375-e088a36bed3b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.323094] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a9bb4f-fbbe-4e4a-9e9a-2d19fae7aacd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.351068] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1d3442ae-f46f-433d-bccb-f323463e3a21 could not be found. [ 2061.351316] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2061.351535] env[69027]: INFO nova.compute.manager [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2061.351777] env[69027]: DEBUG oslo.service.loopingcall [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2061.352011] env[69027]: DEBUG nova.compute.manager [-] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2061.352114] env[69027]: DEBUG nova.network.neutron [-] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2061.385578] env[69027]: DEBUG nova.network.neutron [-] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2061.394023] env[69027]: INFO nova.compute.manager [-] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] Took 0.04 seconds to deallocate network for instance. [ 2061.481674] env[69027]: DEBUG oslo_concurrency.lockutils [None req-43511bf5-26a8-413d-b028-a6a30bc2798e tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.482611] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 316.015s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.482828] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1d3442ae-f46f-433d-bccb-f323463e3a21] During sync_power_state the instance has a pending task (deleting). Skip. 
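The lock lines above trace a per-instance-UUID serialization: the failed build held lock "1d3442ae-f46f-433d-bccb-f323463e3a21" for 611.873s, the delete request waited 415.614s to acquire the same lock, and the power-state sync then waited 316.015s only to skip the instance because its task_state was still "deleting". A minimal sketch of that pattern follows, assuming oslo.concurrency is installed; fake_build and fake_terminate are illustrative stand-ins, not Nova's code.

import time

from oslo_concurrency import lockutils

INSTANCE_UUID = "1d3442ae-f46f-433d-bccb-f323463e3a21"


def fake_build(uuid):
    # Stand-in for _locked_do_build_and_run_instance: hold the per-instance
    # lock while spawning, then fail the way the build above did.
    with lockutils.lock(uuid):
        time.sleep(0.1)
        raise RuntimeError("A specified parameter was not correct: fileType")


def fake_terminate(uuid):
    # Stand-in for do_terminate_instance: blocks until the build releases the
    # same lock, which is why the delete waited ~415s in the log above.
    with lockutils.lock(uuid):
        print("destroying", uuid)


if __name__ == "__main__":
    try:
        fake_build(INSTANCE_UUID)
    except RuntimeError as exc:
        print("build failed:", exc)
    fake_terminate(INSTANCE_UUID)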
[ 2061.483016] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "1d3442ae-f46f-433d-bccb-f323463e3a21" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.771693] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.771945] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances with incomplete migration {{(pid=69027) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2069.781058] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2069.781058] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2069.790017] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] There are 0 instances to clean {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2108.781730] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2108.782135] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2108.782135] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2108.802248] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.802404] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.802537] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.802666] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.802790] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.802910] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.803053] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.803175] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.803294] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2108.803411] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2109.495678] env[69027]: WARNING oslo_vmware.rw_handles [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2109.495678] env[69027]: ERROR oslo_vmware.rw_handles [ 2109.496063] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2109.498063] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2109.498207] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Copying Virtual Disk [datastore2] vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/2d7421f8-2a5a-41f6-a65e-337ee284f0b7/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2109.498494] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26c36c2c-e5cd-4639-ae97-8edd5a8af9a0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.507308] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 
tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 2109.507308] env[69027]: value = "task-3395276" [ 2109.507308] env[69027]: _type = "Task" [ 2109.507308] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.515218] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': task-3395276, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.018341] env[69027]: DEBUG oslo_vmware.exceptions [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2110.018749] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2110.019217] env[69027]: ERROR nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2110.019217] env[69027]: Faults: ['InvalidArgument'] [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Traceback (most recent call last): [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] yield resources [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self.driver.spawn(context, instance, image_meta, [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self._fetch_image_if_missing(context, vi) [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] image_cache(vi, tmp_image_ds_loc) [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] vm_util.copy_virtual_disk( [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] session._wait_for_task(vmdk_copy_task) [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] return self.wait_for_task(task_ref) [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] return evt.wait() [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] result = hub.switch() [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] return self.greenlet.switch() [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self.f(*self.args, **self.kw) [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] raise exceptions.translate_fault(task_info.error) [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Faults: ['InvalidArgument'] [ 2110.019217] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] [ 2110.020113] env[69027]: INFO nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Terminating instance [ 2110.021150] env[69027]: 
DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2110.021371] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2110.021604] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9d31ae2-b860-4c1d-a7fd-a1e11818911b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.023846] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2110.024055] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2110.024787] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cca5b4-b7c6-4aac-b530-a67477742eb1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.031434] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2110.031649] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2cec440-960e-44b4-9c88-3199683718ae {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.033754] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2110.033887] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2110.034834] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da401b13-e0d9-413a-8705-82f5fbbd6dd1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.039159] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 2110.039159] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]5232b812-70a1-46d1-e81c-a95f543a73d7" [ 2110.039159] env[69027]: _type = "Task" [ 2110.039159] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.046924] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]5232b812-70a1-46d1-e81c-a95f543a73d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.095575] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2110.095823] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2110.096029] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Deleting the datastore file [datastore2] 1cbeaaaf-7915-47ee-be61-52f8e05403d9 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2110.096291] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d35a276-2a50-497c-b4a2-52e42e1446ab {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.102166] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 2110.102166] env[69027]: value = "task-3395278" [ 2110.102166] env[69027]: _type = "Task" [ 2110.102166] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.109526] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': task-3395278, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.515906] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0821dcfb-2912-45ce-8d88-a482e1e849cb tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "22e27e0c-3cac-4794-b53a-4df7b8b92ec9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.549630] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2110.549900] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating directory with path [datastore2] vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2110.550151] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ab9e972-286d-43fb-bec8-39fe4ed02de7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.560578] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created directory with path [datastore2] vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2110.560778] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Fetch image to [datastore2] vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2110.560951] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2110.561725] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1c7f12-a694-4aaf-b3f4-a45ee65683b4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.568254] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95365868-04f2-4569-be62-bc60fcf9fa4a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.578576] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c05d92c7-5e87-495f-a267-548e708da939 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.611561] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce00b56b-d7e3-475b-ae1f-3ec363681c12 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.619753] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e5e52dd5-236d-4122-b674-ad52c328d44c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.621394] env[69027]: DEBUG oslo_vmware.api [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': task-3395278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07573} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.621634] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2110.621815] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2110.622016] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2110.622222] env[69027]: INFO nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Took 0.60 seconds to destroy the instance on the hypervisor. 
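The "Waiting for the task", "progress is 0%" and "completed successfully ... duration_secs" lines above come from polling vCenter task state until it reaches success or error; when the task errors, the fault is raised back into the caller, which is how the fileType/InvalidArgument failure surfaced earlier. The loop below is a simplified illustration of that behaviour only; get_task_info, the state strings and this local VimFaultException are stand-ins, not oslo.vmware's actual internals.

import time


class VimFaultException(Exception):
    # Local stand-in carrying the fault names seen above (e.g. 'InvalidArgument').
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(get_task_info, poll_interval=0.5):
    # Poll a task-info callable until it reports success or error, printing the
    # same kind of "progress is N%" breadcrumbs the log records while waiting.
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            err = info["error"]
            raise VimFaultException(err.get("faults", []), err["message"])
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Simulate a copy task that reports one 'running' poll, then the fault above.
    states = iter([
        {"state": "running", "progress": 0},
        {"state": "error",
         "error": {"message": "A specified parameter was not correct: fileType",
                   "faults": ["InvalidArgument"]}},
    ])
    try:
        wait_for_task(lambda: next(states), poll_interval=0.01)
    except VimFaultException as exc:
        print("task failed:", exc, exc.fault_list)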
[ 2110.624336] env[69027]: DEBUG nova.compute.claims [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2110.624510] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.624720] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.645899] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2110.677747] env[69027]: DEBUG nova.scheduler.client.report [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Refreshing inventories for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2110.692249] env[69027]: DEBUG nova.scheduler.client.report [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Updating ProviderTree inventory for provider 4923c91f-3b2b-4ad1-a821-36209acae639 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2110.692488] env[69027]: DEBUG nova.compute.provider_tree [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2110.696173] env[69027]: 
DEBUG oslo_vmware.rw_handles [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2110.754630] env[69027]: DEBUG nova.scheduler.client.report [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Refreshing aggregate associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, aggregates: None {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2110.760169] env[69027]: DEBUG oslo_vmware.rw_handles [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2110.760169] env[69027]: DEBUG oslo_vmware.rw_handles [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2110.773567] env[69027]: DEBUG nova.scheduler.client.report [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Refreshing trait associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2110.875932] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2683b640-8db1-4d0d-8169-ba668982e789 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.883367] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c251d110-5f40-45f8-85cf-63944f6355d3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.913301] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3e9fed-d04d-4656-bf83-36900a95dc1b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.919655] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce3259d-aab1-4c70-b11f-43d78c753c0a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.932088] env[69027]: DEBUG nova.compute.provider_tree [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 
tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2110.940322] env[69027]: DEBUG nova.scheduler.client.report [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2110.954778] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.955356] env[69027]: ERROR nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2110.955356] env[69027]: Faults: ['InvalidArgument'] [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Traceback (most recent call last): [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self.driver.spawn(context, instance, image_meta, [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self._fetch_image_if_missing(context, vi) [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] image_cache(vi, tmp_image_ds_loc) [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in 
_cache_sparse_image [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] vm_util.copy_virtual_disk( [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] session._wait_for_task(vmdk_copy_task) [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] return self.wait_for_task(task_ref) [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] return evt.wait() [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] result = hub.switch() [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] return self.greenlet.switch() [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] self.f(*self.args, **self.kw) [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] raise exceptions.translate_fault(task_info.error) [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Faults: ['InvalidArgument'] [ 2110.955356] env[69027]: ERROR nova.compute.manager [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] [ 2110.956047] env[69027]: DEBUG nova.compute.utils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2110.957435] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Build of instance 
1cbeaaaf-7915-47ee-be61-52f8e05403d9 was re-scheduled: A specified parameter was not correct: fileType [ 2110.957435] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2110.957798] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2110.957972] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2110.958181] env[69027]: DEBUG nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2110.958378] env[69027]: DEBUG nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2111.220135] env[69027]: DEBUG nova.network.neutron [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2111.230488] env[69027]: INFO nova.compute.manager [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Took 0.27 seconds to deallocate network for instance. 
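The traceback above walks the image-cache path: take the lock on the cached VMDK under devstack-image-cache_base, download the image to a vmware_temp/... location, then copy it into the cache; in this run the copy step (CopyVirtualDisk) is what fails with the fileType fault, and the lock is then handed to the next request. The sketch below captures only that cache-if-missing shape under stated assumptions: local filesystem paths stand in for datastore paths, shutil.copyfile for CopyVirtualDisk_Task, and the download callable for the HTTP write handle.

import os
import shutil
import tempfile

from oslo_concurrency import lockutils


def fetch_image_if_missing(image_id, cache_dir, tmp_dir, download):
    # Serialize on the cached path (the "[datastore2] devstack-image-cache_base/..."
    # lock above), then download to a temp location and copy into the cache.
    cached = os.path.join(cache_dir, image_id, image_id + ".vmdk")
    with lockutils.lock(cached):
        if os.path.exists(cached):
            return cached                      # another request already cached it
        tmp = os.path.join(tmp_dir, image_id, "tmp-sparse.vmdk")
        os.makedirs(os.path.dirname(tmp), exist_ok=True)
        download(tmp)                          # stand-in for the HTTP write handle
        os.makedirs(os.path.dirname(cached), exist_ok=True)
        shutil.copyfile(tmp, cached)           # stand-in for CopyVirtualDisk_Task
        return cached


if __name__ == "__main__":
    base = tempfile.mkdtemp()
    print(fetch_image_if_missing(
        "1f242793-8cbc-47db-8e09-30ca2e488bdf",
        cache_dir=os.path.join(base, "devstack-image-cache_base"),
        tmp_dir=os.path.join(base, "vmware_temp"),
        download=lambda dst: open(dst, "wb").close(),
    ))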
[ 2111.334416] env[69027]: INFO nova.scheduler.client.report [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Deleted allocations for instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 [ 2111.354669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-235a261a-37f3-47f4-b8c7-6683d8a52186 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 653.355s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.354669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 457.929s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.354900] env[69027]: DEBUG oslo_concurrency.lockutils [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2111.355114] env[69027]: DEBUG oslo_concurrency.lockutils [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.355292] env[69027]: DEBUG oslo_concurrency.lockutils [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.357183] env[69027]: INFO nova.compute.manager [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Terminating instance [ 2111.359265] env[69027]: DEBUG nova.compute.manager [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2111.359464] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2111.359927] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a160875-e828-485a-8b4c-a972640bb4b0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.369437] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c50939-63c9-4593-8ade-62c2b3aabb19 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.398500] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1cbeaaaf-7915-47ee-be61-52f8e05403d9 could not be found. [ 2111.398740] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2111.398993] env[69027]: INFO nova.compute.manager [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2111.399309] env[69027]: DEBUG oslo.service.loopingcall [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2111.399570] env[69027]: DEBUG nova.compute.manager [-] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2111.399711] env[69027]: DEBUG nova.network.neutron [-] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2111.421508] env[69027]: DEBUG nova.network.neutron [-] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2111.429535] env[69027]: INFO nova.compute.manager [-] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] Took 0.03 seconds to deallocate network for instance. 
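As above, the backend lookup (SearchIndex.FindAllByUuid) finds nothing for the rescheduled instance, so the InstanceNotFound is only logged as a warning and the destroy is treated as already complete, letting terminate continue with network deallocation and cleanup. A tiny sketch of that tolerant-destroy pattern, with hypothetical helpers rather than vmops.py itself:

class InstanceNotFound(Exception):
    pass


def destroy(find_vm_by_uuid, unregister, uuid):
    # If the VM is already gone, warn and continue: terminate still has to
    # deallocate networking and release the placement allocations.
    try:
        unregister(find_vm_by_uuid(uuid))
    except InstanceNotFound:
        print(f"WARNING: instance {uuid} does not exist on backend; "
              "treating it as already destroyed")
    print("Instance destroyed")


if __name__ == "__main__":
    def find_vm_by_uuid(uuid):
        raise InstanceNotFound(uuid)           # mirrors FindAllByUuid finding nothing

    destroy(find_vm_by_uuid, unregister=lambda ref: None,
            uuid="1cbeaaaf-7915-47ee-be61-52f8e05403d9")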
[ 2111.508234] env[69027]: DEBUG oslo_concurrency.lockutils [None req-611a8bb3-ae83-4fef-b685-c1570eba986d tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.153s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.509079] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 366.041s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.509311] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 1cbeaaaf-7915-47ee-be61-52f8e05403d9] During sync_power_state the instance has a pending task (deleting). Skip. [ 2111.509449] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "1cbeaaaf-7915-47ee-be61-52f8e05403d9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.770783] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.770838] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.782709] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.782935] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.783150] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.783312] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2112.784773] env[69027]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ee59f8-cddf-439f-a018-864c8e9678a5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.796397] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7032d0-506c-4ab1-a904-7a88491355b2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.810371] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75071415-a640-46c4-9b43-1351dc279366 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.816948] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5fd0a0-131e-4cc5-94a0-106727bf3a23 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.846436] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180993MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2112.846436] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.846436] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.906337] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.906504] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.906634] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.906757] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.906897] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.907029] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.907156] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.907270] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c78bd74b-1d1b-46bc-9fd8-a553f23e6671 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2112.907450] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2112.907586] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2113.009033] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d58c63-3ed6-4cf9-b5f4-9ef6eda5965d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.017715] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b056ff-5708-4693-a026-f1fa07cadab6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.047013] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf9c9e4-d3b0-42b7-804f-05a3ddd85129 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.053679] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c08db71-4dae-47ba-a44a-fcbd355af899 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.066104] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.074257] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2113.087422] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2113.087600] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.242s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2115.083663] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.083940] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.102535] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.771692] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.771776] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.772137] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.770955] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2118.771136] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2159.383895] env[69027]: WARNING oslo_vmware.rw_handles [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2159.383895] env[69027]: ERROR oslo_vmware.rw_handles [ 2159.384550] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2159.386244] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2159.386491] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Copying Virtual Disk [datastore2] vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/47506cac-67f5-40d8-9e90-3aa1d7eda33a/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2159.386797] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8743849-0dcc-4c93-b0a7-e5c9d4069996 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.396230] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 2159.396230] env[69027]: value = "task-3395279" [ 
2159.396230] env[69027]: _type = "Task" [ 2159.396230] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.404375] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': task-3395279, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.906493] env[69027]: DEBUG oslo_vmware.exceptions [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2159.906785] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2159.907344] env[69027]: ERROR nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2159.907344] env[69027]: Faults: ['InvalidArgument'] [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Traceback (most recent call last): [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] yield resources [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self.driver.spawn(context, instance, image_meta, [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self._fetch_image_if_missing(context, vi) [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] image_cache(vi, tmp_image_ds_loc) [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 
3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] vm_util.copy_virtual_disk( [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] session._wait_for_task(vmdk_copy_task) [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] return self.wait_for_task(task_ref) [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] return evt.wait() [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] result = hub.switch() [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] return self.greenlet.switch() [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self.f(*self.args, **self.kw) [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] raise exceptions.translate_fault(task_info.error) [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Faults: ['InvalidArgument'] [ 2159.907344] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] [ 2159.908302] env[69027]: INFO nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Terminating instance [ 2159.909260] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2159.909482] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2159.909725] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-668c91a3-80c8-4d05-b54f-75cae58ae542 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.911920] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2159.912129] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2159.912868] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b6468a-dec4-4961-9eff-2b603edbae6a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.919943] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2159.920146] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1853ab64-df43-434d-a93a-8990e9c24082 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.922185] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2159.922359] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2159.923319] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99fdc8b3-966e-4e07-82be-2bf352f9f6f0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.927748] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 2159.927748] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52553908-3f8b-f81a-36ff-bc1e291f74d0" [ 2159.927748] env[69027]: _type = "Task" [ 2159.927748] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.940123] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52553908-3f8b-f81a-36ff-bc1e291f74d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.986670] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2159.986898] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2159.987096] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Deleting the datastore file [datastore2] 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2159.987377] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30cee5ab-3bc8-4993-bb32-f532556a0fe4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.993599] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 2159.993599] env[69027]: value = "task-3395281" [ 2159.993599] env[69027]: _type = "Task" [ 2159.993599] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.000985] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': task-3395281, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.438331] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2160.438615] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating directory with path [datastore2] vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2160.438830] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2bc306f-5ded-415b-89f3-7d3dad85f9dd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.450926] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Created directory with path [datastore2] vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2160.451154] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Fetch image to [datastore2] vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2160.451336] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2160.452109] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2ce2d7-a63e-49b4-ba00-425da56b63e7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.458635] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc8600e-bb5a-4ca0-af63-c50dcc885bd8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.467973] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896e1b6d-e54f-475c-acfc-0cd6ad655c4f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.501457] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a901a3f1-8422-4952-9fc2-0da7a9d60ddc {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.509378] env[69027]: DEBUG oslo_vmware.api [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': task-3395281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071694} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.510820] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2160.511027] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2160.511207] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2160.511379] env[69027]: INFO nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Took 0.60 seconds to destroy the instance on the hypervisor. 
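Editor's note: the CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries follow the same oslo.vmware pattern: invoke a vSphere method that returns a Task object, then block on the session's task poller, which raises a translated fault (here VimFaultException with 'InvalidArgument') if the task fails. A minimal sketch of that pattern follows; the credentials, the shortened datastore paths and the error handling are assumptions for illustration, not the code that produced these lines.

    # Sketch of the invoke-then-wait pattern visible in the log.
    from oslo_vmware import api, exceptions as vexc

    # Placeholder endpoint/credentials; a real session needs a reachable vCenter.
    session = api.VMwareAPISession('vc1.example.test', 'user', 'password',
                                   api_retry_count=3, task_poll_interval=0.5)

    src = '[datastore2] vmware_temp/.../tmp-sparse.vmdk'          # paths shortened
    dst = '[datastore2] vmware_temp/.../1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk'

    try:
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=src, destName=dst)
        # wait_for_task polls the task (the "progress is 0%" lines) and raises a
        # translated exception when the task ends in an error state.
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        # e.fault_list carries the raw fault names, e.g. ['InvalidArgument'].
        print('copy failed:', e.fault_list)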
[ 2160.513185] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-20202a40-5d62-41c1-a95f-d42395844598 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.515802] env[69027]: DEBUG nova.compute.claims [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2160.515979] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.516212] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.533428] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2160.671782] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448a420f-df59-45ba-ab1e-63936478ebbc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.679030] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d47316-f379-4d96-b86d-58f9048b12ec {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.712520] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e534d692-6400-4c88-943c-12ff633e770e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.719790] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a872462e-5de1-4b3d-b728-143008d23c05 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.734604] env[69027]: DEBUG nova.compute.provider_tree [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2160.743178] env[69027]: DEBUG nova.scheduler.client.report [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed for provider 
4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2160.757129] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.241s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.757658] env[69027]: ERROR nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2160.757658] env[69027]: Faults: ['InvalidArgument'] [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Traceback (most recent call last): [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self.driver.spawn(context, instance, image_meta, [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self._fetch_image_if_missing(context, vi) [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] image_cache(vi, tmp_image_ds_loc) [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] vm_util.copy_virtual_disk( [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] session._wait_for_task(vmdk_copy_task) [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 
3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] return self.wait_for_task(task_ref) [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] return evt.wait() [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] result = hub.switch() [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] return self.greenlet.switch() [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] self.f(*self.args, **self.kw) [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] raise exceptions.translate_fault(task_info.error) [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Faults: ['InvalidArgument'] [ 2160.757658] env[69027]: ERROR nova.compute.manager [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] [ 2160.758426] env[69027]: DEBUG nova.compute.utils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2160.760119] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Build of instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 was re-scheduled: A specified parameter was not correct: fileType [ 2160.760119] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2160.760497] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks 
/opt/stack/nova/nova/compute/manager.py:2997}} [ 2160.760676] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2160.760874] env[69027]: DEBUG nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2160.761058] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2160.779027] env[69027]: DEBUG oslo_vmware.rw_handles [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2160.839446] env[69027]: DEBUG oslo_vmware.rw_handles [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2160.839693] env[69027]: DEBUG oslo_vmware.rw_handles [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2161.159303] env[69027]: DEBUG nova.network.neutron [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2161.173788] env[69027]: INFO nova.compute.manager [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Took 0.41 seconds to deallocate network for instance. 
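Editor's note: the rw_handles lines describe streaming 21,318,656 bytes of image data over HTTPS to a datastore "folder" URL and then closing the write handle (the earlier RemoteDisconnected warning came from reading the response during that close). Purely to illustrate the transfer mechanism, and emphatically not oslo.vmware's FileWriteHandle implementation, a chunked HTTP write to such a URL could look like the sketch below; the hostname, cookie, local file name and chunk size are placeholders.

    # Illustration only: stream file data to a vSphere datastore folder URL.
    import http.client
    import ssl
    import urllib.parse

    url = ('https://esx.example.test:443/folder/vmware_temp/.../tmp-sparse.vmdk'
           '?dcPath=ha-datacenter&dsName=datastore2')        # shape of the log URL
    parsed = urllib.parse.urlsplit(url)

    conn = http.client.HTTPSConnection(
        parsed.hostname, parsed.port,
        context=ssl._create_unverified_context())  # lab setup; verify certs in prod
    conn.putrequest('PUT', parsed.path + '?' + parsed.query)
    conn.putheader('Cookie', 'vmware_cm_soap_session="..."')  # placeholder ticket
    conn.putheader('Content-Length', str(21318656))           # size from the log
    conn.putheader('Content-Type', 'application/octet-stream')
    conn.endheaders()

    with open('tmp-sparse.vmdk', 'rb') as src:
        for chunk in iter(lambda: src.read(64 * 1024), b''):
            conn.send(chunk)          # write the image iterator chunk by chunk

    resp = conn.getresponse()  # the step where RemoteDisconnected surfaced above
    print(resp.status, resp.reason)
    conn.close()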
[ 2161.269689] env[69027]: INFO nova.scheduler.client.report [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Deleted allocations for instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 [ 2161.294442] env[69027]: DEBUG oslo_concurrency.lockutils [None req-b7a4fef9-0377-4c54-8572-ac13ab85f596 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 639.495s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.294726] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 443.395s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.295359] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2161.295508] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.295700] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.297985] env[69027]: INFO nova.compute.manager [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Terminating instance [ 2161.300037] env[69027]: DEBUG nova.compute.manager [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2161.300271] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2161.300816] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ca5e0b63-2763-4272-ad20-51cd21fd3658 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.312415] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1f5775-b312-44f6-a443-bcf0930fbc49 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.339702] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38 could not be found. [ 2161.339904] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2161.340094] env[69027]: INFO nova.compute.manager [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2161.340339] env[69027]: DEBUG oslo.service.loopingcall [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2161.340563] env[69027]: DEBUG nova.compute.manager [-] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2161.340662] env[69027]: DEBUG nova.network.neutron [-] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2161.363201] env[69027]: DEBUG nova.network.neutron [-] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2161.370914] env[69027]: INFO nova.compute.manager [-] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] Took 0.03 seconds to deallocate network for instance. 
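Editor's note: terminating 3319ce56 follows the same idempotent path as the earlier 1cbeaaaf delete: the VM is already gone on the backend, so InstanceNotFound is downgraded to a warning and network deallocation is wrapped in a looping call ("Waiting for function ... _deallocate_network_with_retries to return"). The sketch below shows a retry wrapper of that shape built on oslo.service; the deallocate() stub, interval and retry budget are assumptions, not Nova's code.

    # Sketch of a "retry until done" wrapper in the style of the looping-call line.
    from oslo_service import loopingcall

    def deallocate_network(instance_uuid):
        # Placeholder for the real Neutron call.
        print('deallocate_for_instance()', instance_uuid)

    def _deallocate_network_with_retries(instance_uuid, attempts=3):
        state = {'tries': 0}

        def _try_once():
            state['tries'] += 1
            try:
                deallocate_network(instance_uuid)
            except Exception:
                if state['tries'] >= attempts:
                    raise          # give up; the exception propagates via wait()
                return             # run again on the next interval
            raise loopingcall.LoopingCallDone()  # stop the loop on success

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        timer.start(interval=1).wait()

    _deallocate_network_with_retries('3319ce56-bd7c-40c5-b6f5-01fccfb8bc38')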
[ 2161.454184] env[69027]: DEBUG oslo_concurrency.lockutils [None req-bb473807-2036-4ca7-81be-8b3760ea262b tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.455024] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 415.987s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2161.455202] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 3319ce56-bd7c-40c5-b6f5-01fccfb8bc38] During sync_power_state the instance has a pending task (deleting). Skip. [ 2161.455377] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "3319ce56-bd7c-40c5-b6f5-01fccfb8bc38" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2170.772260] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2170.772538] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2170.772586] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2170.792690] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2170.792861] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2170.793017] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2170.793154] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2170.793278] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2170.793401] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2170.793519] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2170.793638] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2171.771515] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2174.770806] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2174.783799] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.784159] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.784369] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2174.784590] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2174.786185] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46dfa61-242b-462f-ba07-b45086112970 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.797879] env[69027]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556e380b-9bfe-46b7-958f-18515de4e0c5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.817376] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71d210a-d8bc-43c7-9481-0a0075479d34 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.825765] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7378f8c-e883-466e-bcd1-45e493da0a02 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.855655] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180981MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2174.855817] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.855994] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.913692] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 01d7b088-73b4-4624-b013-2da51bf78767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2174.913854] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2174.913996] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2174.914134] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2174.914256] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2174.914420] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2174.914481] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c78bd74b-1d1b-46bc-9fd8-a553f23e6671 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2174.914685] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2174.914784] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2175.001630] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d1cbb1-f048-4f9f-8d22-634e8222f68e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.009723] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6fec5e-68fc-42d4-aab1-a510232311b6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.038794] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d34d0fa-0e13-465b-a29a-87eaa00072d5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.045675] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a688ee02-d335-4f87-b876-48a687b10f24 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.059702] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2175.067405] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 
4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2175.081040] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2175.081143] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.225s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2176.077552] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2176.771693] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2176.772056] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2176.772235] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.771596] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.771897] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2179.771588] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2209.535362] env[69027]: WARNING oslo_vmware.rw_handles [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2209.535362] env[69027]: ERROR oslo_vmware.rw_handles [ 2209.536054] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2209.538074] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2209.538394] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Copying Virtual Disk [datastore2] vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/17f364a6-b333-45a3-ad3a-d279f7ba8518/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2209.538709] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-477ba1e2-594b-4cc7-8c88-24d62793846e {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.546765] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 2209.546765] env[69027]: value = "task-3395282" [ 2209.546765] env[69027]: _type = "Task" [ 2209.546765] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.555173] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': task-3395282, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.057265] env[69027]: DEBUG oslo_vmware.exceptions [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2210.057607] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.058195] env[69027]: ERROR nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2210.058195] env[69027]: Faults: ['InvalidArgument'] [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Traceback (most recent call last): [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] yield resources [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] self.driver.spawn(context, instance, image_meta, [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] self._fetch_image_if_missing(context, vi) [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 
01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] image_cache(vi, tmp_image_ds_loc) [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] vm_util.copy_virtual_disk( [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] session._wait_for_task(vmdk_copy_task) [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] return self.wait_for_task(task_ref) [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] return evt.wait() [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] result = hub.switch() [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] return self.greenlet.switch() [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] self.f(*self.args, **self.kw) [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] raise exceptions.translate_fault(task_info.error) [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Faults: ['InvalidArgument'] [ 2210.058195] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] [ 2210.059036] env[69027]: INFO nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 
01d7b088-73b4-4624-b013-2da51bf78767] Terminating instance [ 2210.061416] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2210.061612] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2210.061914] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2210.062123] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2210.062845] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3beeb39d-2e74-4d9a-9dbf-de9d3571a26a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.065648] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9baff309-4d84-4092-bf96-e2d6f87675bc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.071382] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2210.071591] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3ac0fff-b9bc-4742-9bd0-82fa2f3c5999 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.073681] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2210.073856] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2210.074809] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d103914-76f0-4f50-a7d2-6ca7f7414a33 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.079941] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Waiting for the task: (returnval){ [ 2210.079941] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52deab94-f188-4819-c5c1-c87e2cd4cc3f" [ 2210.079941] env[69027]: _type = "Task" [ 2210.079941] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.090361] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52deab94-f188-4819-c5c1-c87e2cd4cc3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.139052] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2210.139289] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2210.139472] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Deleting the datastore file [datastore2] 01d7b088-73b4-4624-b013-2da51bf78767 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2210.139737] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b0d5cb6-9ad6-4022-a933-89cc42484eac {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.145866] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for the task: (returnval){ [ 2210.145866] env[69027]: value = "task-3395284" [ 2210.145866] env[69027]: _type = "Task" [ 2210.145866] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.153521] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': task-3395284, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.590268] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2210.590641] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Creating directory with path [datastore2] vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2210.590769] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6e65876-3c7a-4072-87c2-2c26b00e852c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.601312] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Created directory with path [datastore2] vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2210.601505] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Fetch image to [datastore2] vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2210.601679] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2210.602392] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc36291-fa80-40a4-9b86-f58c8a35ce16 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.609044] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca4fa6e-232a-4ab9-8749-924f842e0a1a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.617710] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503bf4f6-9178-4638-af29-15ab15469936 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.650893] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438181e2-a6d4-42f1-a03e-f7ed8098e921 
{{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.659279] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9e0c1416-eb67-4c54-a4a4-b4067e41c88e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.660901] env[69027]: DEBUG oslo_vmware.api [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Task: {'id': task-3395284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077426} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.661153] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2210.661338] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2210.661532] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2210.661707] env[69027]: INFO nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Took 0.60 seconds to destroy the instance on the hypervisor. 
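The entries from the RemoteDisconnected warning through task-3395284 above all follow the same oslo.vmware pattern: invoke a vCenter *_Task method, then poll it with wait_for_task, which raises VimFaultException (here with Faults: ['InvalidArgument'] for the bad fileType) when the task errors out. A hedged sketch of that pattern with a hypothetical copy_disk helper; the session/vim objects and argument values are assumptions, not taken from the log:

# Sketch only: drives CopyVirtualDisk_Task via oslo.vmware's public
# invoke_api / wait_for_task calls; not Nova's vm_util.copy_virtual_disk.
from oslo_vmware import exceptions as vexc


def copy_disk(session, source_path, dest_path, dc_ref=None, copy_spec=None):
    vim = session.vim
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName=source_path, sourceDatacenter=dc_ref,
        destName=dest_path, destSpec=copy_spec)
    try:
        return session.wait_for_task(task)   # polls until SUCCESS or error
    except vexc.VimFaultException:
        # wait_for_task surfaces task errors like the one above as
        # VimFaultException; exc.fault_list holds the vSphere fault names
        # (['InvalidArgument'] for the bad fileType parameter in this log).
        raise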
[ 2210.663742] env[69027]: DEBUG nova.compute.claims [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2210.663909] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.664140] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.686061] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2210.736386] env[69027]: DEBUG oslo_vmware.rw_handles [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2210.796758] env[69027]: DEBUG oslo_vmware.rw_handles [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2210.796839] env[69027]: DEBUG oslo_vmware.rw_handles [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2210.863301] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd400080-3bb2-4184-835c-a794b79a714c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.873519] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bdd98d-12c1-4189-8cbc-ca331050c18d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.913131] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f33c02-3612-4292-b086-09e95e248bdb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.919957] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875790eb-a27d-452c-87ee-021649862cf5 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.933809] env[69027]: DEBUG nova.compute.provider_tree [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2210.943673] env[69027]: DEBUG nova.scheduler.client.report [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2210.960321] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.960854] env[69027]: ERROR nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2210.960854] env[69027]: Faults: ['InvalidArgument'] [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Traceback (most recent call last): [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] 
self.driver.spawn(context, instance, image_meta, [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] self._fetch_image_if_missing(context, vi) [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] image_cache(vi, tmp_image_ds_loc) [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] vm_util.copy_virtual_disk( [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] session._wait_for_task(vmdk_copy_task) [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] return self.wait_for_task(task_ref) [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] return evt.wait() [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] result = hub.switch() [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] return self.greenlet.switch() [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] self.f(*self.args, **self.kw) [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] raise exceptions.translate_fault(task_info.error) [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Faults: ['InvalidArgument'] [ 2210.960854] env[69027]: ERROR nova.compute.manager [instance: 01d7b088-73b4-4624-b013-2da51bf78767] [ 2210.961652] env[69027]: DEBUG nova.compute.utils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2210.963169] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Build of instance 01d7b088-73b4-4624-b013-2da51bf78767 was re-scheduled: A specified parameter was not correct: fileType [ 2210.963169] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2210.963579] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2210.963757] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2210.963928] env[69027]: DEBUG nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2210.964109] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2211.305394] env[69027]: DEBUG nova.network.neutron [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2211.318717] env[69027]: INFO nova.compute.manager [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Took 0.35 seconds to deallocate network for instance. 
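For reference, the inventory payload reported above for provider 4923c91f-3b2b-4ad1-a821-36209acae639 maps to schedulable capacity as capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A small worked example using the exact numbers from the log (everything else here is illustrative, not logged output):

# Worked example: interpret the placement inventory shown in this log.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
             'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 102,
                'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: schedulable=192, per-allocation cap=16
# MEMORY_MB: schedulable=196078, per-allocation cap=65530
# DISK_GB: schedulable=400, per-allocation cap=102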
[ 2211.409023] env[69027]: INFO nova.scheduler.client.report [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Deleted allocations for instance 01d7b088-73b4-4624-b013-2da51bf78767 [ 2211.429878] env[69027]: DEBUG oslo_concurrency.lockutils [None req-5eb9d651-bb28-4b5a-bce0-7b37b3018ae5 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "01d7b088-73b4-4624-b013-2da51bf78767" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 581.546s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.429878] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "01d7b088-73b4-4624-b013-2da51bf78767" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 465.961s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.429878] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] During sync_power_state the instance has a pending task (spawning). Skip. [ 2211.429878] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "01d7b088-73b4-4624-b013-2da51bf78767" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.430187] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "01d7b088-73b4-4624-b013-2da51bf78767" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 386.030s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.430312] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Acquiring lock "01d7b088-73b4-4624-b013-2da51bf78767-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.431884] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "01d7b088-73b4-4624-b013-2da51bf78767-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.431884] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "01d7b088-73b4-4624-b013-2da51bf78767-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2211.432670] env[69027]: INFO nova.compute.manager [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Terminating instance [ 2211.435125] env[69027]: DEBUG nova.compute.manager [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2211.435332] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2211.436307] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa590b1b-4841-4327-b20f-cd5143fa9ee9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.445710] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4a5766-2460-485f-8b11-23f13b39edbb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.471705] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 01d7b088-73b4-4624-b013-2da51bf78767 could not be found. [ 2211.471909] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2211.472105] env[69027]: INFO nova.compute.manager [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2211.472349] env[69027]: DEBUG oslo.service.loopingcall [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2211.472814] env[69027]: DEBUG nova.compute.manager [-] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2211.472914] env[69027]: DEBUG nova.network.neutron [-] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2211.499963] env[69027]: DEBUG nova.network.neutron [-] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2211.507969] env[69027]: INFO nova.compute.manager [-] [instance: 01d7b088-73b4-4624-b013-2da51bf78767] Took 0.03 seconds to deallocate network for instance. [ 2211.595840] env[69027]: DEBUG oslo_concurrency.lockutils [None req-a1531523-8446-4273-85f6-9f9d24226867 tempest-ImagesTestJSON-216246989 tempest-ImagesTestJSON-216246989-project-member] Lock "01d7b088-73b4-4624-b013-2da51bf78767" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.166s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.655353] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1124a95f-902b-45f9-9c63-3a8efd2a6f00 tempest-ServerAddressesTestJSON-1671524075 tempest-ServerAddressesTestJSON-1671524075-project-member] Acquiring lock "c78bd74b-1d1b-46bc-9fd8-a553f23e6671" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.772112] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2230.772366] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2230.772424] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2230.789550] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2230.789695] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2230.789827] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2230.789954] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2230.790092] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2230.790217] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2230.790345] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2233.771581] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.100352] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "a1088215-4676-47d3-9df6-e835d32f4b5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.100670] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "a1088215-4676-47d3-9df6-e835d32f4b5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.111818] env[69027]: DEBUG nova.compute.manager [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Starting instance... 
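The recurring "Running periodic task ComputeManager._..." entries come from oslo.service's periodic-task machinery: methods decorated on a manager class are collected and invoked by run_periodic_tasks(). A self-contained sketch of that registration pattern (the class name, spacing and run_immediately values below are illustrative choices, not Nova's configuration):

# Hedged sketch of oslo.service periodic-task registration.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class DemoManager(periodic_task.PeriodicTasks):
    """Hypothetical manager; Nova's ComputeManager uses the same machinery."""

    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60, run_immediately=True)
    def _heal_info_cache(self, context):
        # In the log above this walks instances and skips ones still Building.
        print("healing instance info cache")


mgr = DemoManager()
# A service loop normally calls this on a timer; one manual pass here:
mgr.run_periodic_tasks(context=None)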
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2235.159399] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.159653] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.161360] env[69027]: INFO nova.compute.claims [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2235.293563] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e145c1-59a8-4f00-8b8f-6839e2442d04 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.301240] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815a01cb-f68d-44ca-b781-b70849466548 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.331629] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5e2cdb-1709-4fc4-8ad3-8eccd130b87e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.338763] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529f4257-72d4-41c3-bad5-6d059e7ab09e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.353210] env[69027]: DEBUG nova.compute.provider_tree [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2235.362126] env[69027]: DEBUG nova.scheduler.client.report [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2235.377557] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.218s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.378012] env[69027]: DEBUG nova.compute.manager [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2235.411950] env[69027]: DEBUG nova.compute.utils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2235.413258] env[69027]: DEBUG nova.compute.manager [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2235.413438] env[69027]: DEBUG nova.network.neutron [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2235.420934] env[69027]: DEBUG nova.compute.manager [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2235.482463] env[69027]: DEBUG nova.compute.manager [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Start spawning the instance on the hypervisor. 
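Each Acquiring lock "compute_resources" ... acquired ... released triple above is oslo.concurrency's lockutils logging around a named lock; the resource tracker serialises claims this way so concurrent builds cannot both consume the same free resources. A minimal sketch of the two usual forms, with a toy claim function (illustrative only, not the resource tracker's code):

# Hedged sketch of oslo.concurrency named locks, as reported by lockutils.
from oslo_concurrency import lockutils

free_vcpus = {'count': 48}        # assumption: toy resource pool


@lockutils.synchronized('compute_resources')
def claim(vcpus):
    """Decorator form: the whole claim runs under the named lock."""
    if free_vcpus['count'] < vcpus:
        raise RuntimeError('claim failed')
    free_vcpus['count'] -= vcpus


def release(vcpus):
    # Context-manager form of the same named lock.
    with lockutils.lock('compute_resources'):
        free_vcpus['count'] += vcpus


claim(1)
release(1)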
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2235.498465] env[69027]: DEBUG nova.policy [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58e66320388b4e8294205232eec8cfaa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947495558dff46eb9951fadfc3d12d32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 2235.507635] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2235.507865] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2235.508040] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2235.508231] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2235.508380] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2235.508529] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2235.508736] env[69027]: DEBUG nova.virt.hardware [None 
req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2235.508900] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2235.509081] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2235.509250] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2235.509422] env[69027]: DEBUG nova.virt.hardware [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2235.510282] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276d840f-7465-4772-a668-c114e07b296e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.518294] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6786f4c-9805-416e-9beb-dc47257d433b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.766449] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2235.800163] env[69027]: DEBUG nova.network.neutron [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Successfully created port: 78fdc6a5-c823-4135-8b26-153b07b4808d {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2236.465868] env[69027]: DEBUG nova.compute.manager [req-dfc9e0fd-1cb2-4eb6-a1ae-52b09ac92406 req-f1d76ffe-1023-4f88-b49c-3a01b9ae5c56 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Received event network-vif-plugged-78fdc6a5-c823-4135-8b26-153b07b4808d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2236.466223] env[69027]: DEBUG oslo_concurrency.lockutils [req-dfc9e0fd-1cb2-4eb6-a1ae-52b09ac92406 req-f1d76ffe-1023-4f88-b49c-3a01b9ae5c56 service nova] Acquiring lock "a1088215-4676-47d3-9df6-e835d32f4b5a-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.466544] env[69027]: DEBUG oslo_concurrency.lockutils [req-dfc9e0fd-1cb2-4eb6-a1ae-52b09ac92406 req-f1d76ffe-1023-4f88-b49c-3a01b9ae5c56 service nova] Lock "a1088215-4676-47d3-9df6-e835d32f4b5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.466802] env[69027]: DEBUG oslo_concurrency.lockutils [req-dfc9e0fd-1cb2-4eb6-a1ae-52b09ac92406 req-f1d76ffe-1023-4f88-b49c-3a01b9ae5c56 service nova] Lock "a1088215-4676-47d3-9df6-e835d32f4b5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.467075] env[69027]: DEBUG nova.compute.manager [req-dfc9e0fd-1cb2-4eb6-a1ae-52b09ac92406 req-f1d76ffe-1023-4f88-b49c-3a01b9ae5c56 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] No waiting events found dispatching network-vif-plugged-78fdc6a5-c823-4135-8b26-153b07b4808d {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2236.467339] env[69027]: WARNING nova.compute.manager [req-dfc9e0fd-1cb2-4eb6-a1ae-52b09ac92406 req-f1d76ffe-1023-4f88-b49c-3a01b9ae5c56 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Received unexpected event network-vif-plugged-78fdc6a5-c823-4135-8b26-153b07b4808d for instance with vm_state building and task_state spawning. [ 2236.560547] env[69027]: DEBUG nova.network.neutron [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Successfully updated port: 78fdc6a5-c823-4135-8b26-153b07b4808d {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2236.575470] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "refresh_cache-a1088215-4676-47d3-9df6-e835d32f4b5a" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2236.575470] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "refresh_cache-a1088215-4676-47d3-9df6-e835d32f4b5a" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2236.575649] env[69027]: DEBUG nova.network.neutron [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2236.625950] env[69027]: DEBUG nova.network.neutron [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2236.771475] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2236.782572] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.783058] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.783058] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2236.783182] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2236.784437] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b1a95b-6f77-424f-a3f6-7dc952657075 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.793808] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f888bff3-57ce-4cc2-bd85-7be540e7f64c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.809711] env[69027]: DEBUG nova.network.neutron [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Updating instance_info_cache with network_info: [{"id": "78fdc6a5-c823-4135-8b26-153b07b4808d", "address": "fa:16:3e:01:0b:cc", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78fdc6a5-c8", 
"ovs_interfaceid": "78fdc6a5-c823-4135-8b26-153b07b4808d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.811542] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1009125-d04d-4391-a8b6-bcecdd57a093 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.818729] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451d0afb-b8c3-4cd7-9404-1bdbce77056b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.823008] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "refresh_cache-a1088215-4676-47d3-9df6-e835d32f4b5a" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2236.823317] env[69027]: DEBUG nova.compute.manager [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Instance network_info: |[{"id": "78fdc6a5-c823-4135-8b26-153b07b4808d", "address": "fa:16:3e:01:0b:cc", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78fdc6a5-c8", "ovs_interfaceid": "78fdc6a5-c823-4135-8b26-153b07b4808d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2236.824181] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:0b:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56136ef6-99d7-4562-9a9f-d66fec951c5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78fdc6a5-c823-4135-8b26-153b07b4808d', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2236.831711] env[69027]: DEBUG oslo.service.loopingcall [None req-1b465854-973d-476e-9fbb-36c30254eead 
tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2236.855789] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2236.856230] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180989MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2236.856369] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2236.856565] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2236.858189] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2e729b8-497d-49b0-81f8-6727dec27b61 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.878465] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2236.878465] env[69027]: value = "task-3395285" [ 2236.878465] env[69027]: _type = "Task" [ 2236.878465] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2236.886373] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395285, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.922243] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance e379ca79-9458-464d-b07f-f651e474ebd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2236.922386] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2236.922517] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
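The Folder.CreateVM_Task / "Waiting for the task" / "progress is 0%" / "completed successfully" sequence is oslo.vmware's task handling: invoke_api() issues the SOAP call and returns a task reference, and the session polls it until it finishes. A hedged sketch of that usage; the vCenter address, credentials, folder/pool references and config spec are placeholders, and the constructor arguments reflect common oslo.vmware usage and should be checked against the installed release:

# Hedged sketch of oslo.vmware task polling (wait_for_task/_poll_task above).
from oslo_vmware import api

vm_folder_ref = None       # placeholder: a real Folder managed object ref
resource_pool_ref = None   # placeholder: a real ResourcePool moref
config_spec = None         # placeholder: a real VirtualMachineConfigSpec

# Creating the session logs in to vCenter, so a reachable endpoint is needed.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# invoke_api() issues the SOAP request and returns a task moref;
# wait_for_task() polls it and raises if the task ends in an error state.
task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config_spec, pool=resource_pool_ref)
task_info = session.wait_for_task(task_ref)
print(task_info.result)    # the created VM's managed object reference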
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2236.922642] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2236.922761] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2236.922882] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c78bd74b-1d1b-46bc-9fd8-a553f23e6671 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2236.923031] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a1088215-4676-47d3-9df6-e835d32f4b5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2236.923248] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2236.923395] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2237.036811] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6b0e28-5be8-4743-8b6e-118d7adb6b5c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.045214] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425d01d5-5206-4f55-b2f2-ed45d2510c2d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.076419] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa6dd1a-32fc-4bf4-ae06-bd44fbc96017 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.083903] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bc2330-6604-4567-81d9-9fc3d704791d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.097326] env[69027]: DEBUG nova.compute.provider_tree 
[None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2237.106165] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2237.120016] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2237.120234] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.264s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.390112] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395285, 'name': CreateVM_Task, 'duration_secs': 0.308449} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.390327] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2237.391326] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2237.391584] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2237.392084] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2237.392435] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c57e7e88-78fe-4ec2-92aa-868cfc38e0fb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.398197] env[69027]: 
DEBUG oslo_vmware.api [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 2237.398197] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]526642c2-dac1-19e8-c2c7-d7b05fdf810a" [ 2237.398197] env[69027]: _type = "Task" [ 2237.398197] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.407880] env[69027]: DEBUG oslo_vmware.api [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]526642c2-dac1-19e8-c2c7-d7b05fdf810a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.909104] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2237.909435] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2237.909574] env[69027]: DEBUG oslo_concurrency.lockutils [None req-1b465854-973d-476e-9fbb-36c30254eead tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2238.497412] env[69027]: DEBUG nova.compute.manager [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Received event network-changed-78fdc6a5-c823-4135-8b26-153b07b4808d {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2238.497617] env[69027]: DEBUG nova.compute.manager [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Refreshing instance network info cache due to event network-changed-78fdc6a5-c823-4135-8b26-153b07b4808d. 
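The inventory payload the report client logs above pins down the provider's usable capacity: for each resource class, placement allows (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A quick worked check against the numbers in the log, treating that expression as the standard placement capacity rule:

# Effective capacity implied by the logged inventory data.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

This is why 48 physical vCPUs can back well over 48 guest vCPUs here: the 4.0 allocation ratio gives placement 192 VCPU to hand out, while memory and disk are not overcommitted.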
{{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2238.497828] env[69027]: DEBUG oslo_concurrency.lockutils [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] Acquiring lock "refresh_cache-a1088215-4676-47d3-9df6-e835d32f4b5a" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2238.497975] env[69027]: DEBUG oslo_concurrency.lockutils [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] Acquired lock "refresh_cache-a1088215-4676-47d3-9df6-e835d32f4b5a" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2238.498314] env[69027]: DEBUG nova.network.neutron [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Refreshing network info cache for port 78fdc6a5-c823-4135-8b26-153b07b4808d {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2238.945710] env[69027]: DEBUG nova.network.neutron [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Updated VIF entry in instance network info cache for port 78fdc6a5-c823-4135-8b26-153b07b4808d. {{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2238.946419] env[69027]: DEBUG nova.network.neutron [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Updating instance_info_cache with network_info: [{"id": "78fdc6a5-c823-4135-8b26-153b07b4808d", "address": "fa:16:3e:01:0b:cc", "network": {"id": "c071ab13-b295-41c7-b89e-e6a7e7eb2ed2", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-170762747-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947495558dff46eb9951fadfc3d12d32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56136ef6-99d7-4562-9a9f-d66fec951c5c", "external-id": "nsx-vlan-transportzone-32", "segmentation_id": 32, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78fdc6a5-c8", "ovs_interfaceid": "78fdc6a5-c823-4135-8b26-153b07b4808d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2238.956333] env[69027]: DEBUG oslo_concurrency.lockutils [req-26120ae9-b33d-44e6-8ffd-e0521299e09a req-f805fd0d-7658-4b42-91c7-1dc71c64c3c1 service nova] Releasing lock "refresh_cache-a1088215-4676-47d3-9df6-e835d32f4b5a" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2239.119778] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.120032] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.120198] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.767413] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.784467] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2239.784628] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2241.771910] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.825488] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquiring lock "fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.825795] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Lock "fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.837857] env[69027]: DEBUG nova.compute.manager [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2247.888638] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.888913] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.890710] env[69027]: INFO nova.compute.claims [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2248.038897] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0815f847-a1f8-4257-8b38-1137e6e3c27c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.047399] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4a4bd8-f95f-4940-8b98-f0a6093ee9c0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.078149] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273adc1f-ae70-46d9-a454-653614200b94 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.085470] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a448e894-c856-4a6a-94c9-aabb56a0b1cf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.098404] env[69027]: DEBUG nova.compute.provider_tree [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2248.106777] env[69027]: DEBUG nova.scheduler.client.report [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2248.119647] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 
tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.231s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.120109] env[69027]: DEBUG nova.compute.manager [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2248.149741] env[69027]: DEBUG nova.compute.utils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2248.150900] env[69027]: DEBUG nova.compute.manager [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2248.154080] env[69027]: DEBUG nova.network.neutron [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2248.164740] env[69027]: DEBUG nova.compute.manager [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2248.213357] env[69027]: DEBUG nova.policy [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7a032afd4c346f8b6875da8f346b63a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6037bebe102b4bb2a9e32f2770732193', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 2248.232203] env[69027]: DEBUG nova.compute.manager [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Start spawning the instance on the hypervisor. 
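The nova.policy entry above shows an oslo.policy check failing for these member-role credentials, which is expected: attaching an external network is typically admin-only, so the build simply proceeds without it. A hedged sketch of such a check; the rule name and credential values are taken from the log, while the 'role:admin' default check string is an assumption for the sketch:

# Hedged sketch of an oslo.policy authorization check.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

# Credentials as logged for this request (trimmed to the relevant keys).
creds = {'roles': ['reader', 'member'],
         'project_id': '6037bebe102b4bb2a9e32f2770732193'}

allowed = enforcer.enforce('network:attach_external_network', {}, creds,
                           do_raise=False)
print(allowed)   # False -> no external network is attached for this build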
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2248.263310] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2248.263577] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2248.263732] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2248.264229] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2248.264229] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2248.264335] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2248.264493] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2248.264760] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2248.264829] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 
tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2248.264990] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2248.265220] env[69027]: DEBUG nova.virt.hardware [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2248.266111] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1fb8883-f3e3-4c52-8026-4c45fac3f655 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.274244] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c08fc8-5d90-4560-bc47-f7f64350d591 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.563320] env[69027]: DEBUG nova.network.neutron [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Successfully created port: 462d3b8b-f254-4b3a-aec4-e917af482ff7 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2249.303041] env[69027]: DEBUG nova.compute.manager [req-b4dba3a5-0ba4-44e6-bd20-8b3c762030c6 req-d3d74993-52e3-42eb-b279-15682ddff3d3 service nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Received event network-vif-plugged-462d3b8b-f254-4b3a-aec4-e917af482ff7 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2249.303284] env[69027]: DEBUG oslo_concurrency.lockutils [req-b4dba3a5-0ba4-44e6-bd20-8b3c762030c6 req-d3d74993-52e3-42eb-b279-15682ddff3d3 service nova] Acquiring lock "fc594297-bbf9-4ea4-82c6-a709b4e0c3f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.303506] env[69027]: DEBUG oslo_concurrency.lockutils [req-b4dba3a5-0ba4-44e6-bd20-8b3c762030c6 req-d3d74993-52e3-42eb-b279-15682ddff3d3 service nova] Lock "fc594297-bbf9-4ea4-82c6-a709b4e0c3f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2249.303691] env[69027]: DEBUG oslo_concurrency.lockutils [req-b4dba3a5-0ba4-44e6-bd20-8b3c762030c6 req-d3d74993-52e3-42eb-b279-15682ddff3d3 service nova] Lock "fc594297-bbf9-4ea4-82c6-a709b4e0c3f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2249.303861] env[69027]: DEBUG nova.compute.manager [req-b4dba3a5-0ba4-44e6-bd20-8b3c762030c6 req-d3d74993-52e3-42eb-b279-15682ddff3d3 service 
nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] No waiting events found dispatching network-vif-plugged-462d3b8b-f254-4b3a-aec4-e917af482ff7 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2249.304811] env[69027]: WARNING nova.compute.manager [req-b4dba3a5-0ba4-44e6-bd20-8b3c762030c6 req-d3d74993-52e3-42eb-b279-15682ddff3d3 service nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Received unexpected event network-vif-plugged-462d3b8b-f254-4b3a-aec4-e917af482ff7 for instance with vm_state building and task_state spawning. [ 2249.407263] env[69027]: DEBUG nova.network.neutron [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Successfully updated port: 462d3b8b-f254-4b3a-aec4-e917af482ff7 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2249.418406] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquiring lock "refresh_cache-fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.420214] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquired lock "refresh_cache-fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.420480] env[69027]: DEBUG nova.network.neutron [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2249.482184] env[69027]: DEBUG nova.network.neutron [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2249.653359] env[69027]: DEBUG nova.network.neutron [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Updating instance_info_cache with network_info: [{"id": "462d3b8b-f254-4b3a-aec4-e917af482ff7", "address": "fa:16:3e:bc:6b:4c", "network": {"id": "2cc76786-2da0-422a-9911-dd5b20923794", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-812059931-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6037bebe102b4bb2a9e32f2770732193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9732690c-bdcf-4e6f-9a32-42c196333eb8", "external-id": "nsx-vlan-transportzone-548", "segmentation_id": 548, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462d3b8b-f2", "ovs_interfaceid": "462d3b8b-f254-4b3a-aec4-e917af482ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2249.667062] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Releasing lock "refresh_cache-fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2249.667339] env[69027]: DEBUG nova.compute.manager [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Instance network_info: |[{"id": "462d3b8b-f254-4b3a-aec4-e917af482ff7", "address": "fa:16:3e:bc:6b:4c", "network": {"id": "2cc76786-2da0-422a-9911-dd5b20923794", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-812059931-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6037bebe102b4bb2a9e32f2770732193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9732690c-bdcf-4e6f-9a32-42c196333eb8", "external-id": "nsx-vlan-transportzone-548", "segmentation_id": 548, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462d3b8b-f2", "ovs_interfaceid": "462d3b8b-f254-4b3a-aec4-e917af482ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
2249.667717] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:6b:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9732690c-bdcf-4e6f-9a32-42c196333eb8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '462d3b8b-f254-4b3a-aec4-e917af482ff7', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2249.675328] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Creating folder: Project (6037bebe102b4bb2a9e32f2770732193). Parent ref: group-v677321. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2249.675808] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a7277b2-ac2a-4800-9450-eb4099cb6f89 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.686913] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Created folder: Project (6037bebe102b4bb2a9e32f2770732193) in parent group-v677321. [ 2249.687077] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Creating folder: Instances. Parent ref: group-v677430. {{(pid=69027) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2249.687289] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01757da7-3df9-4ef2-bcdb-f16e6154d394 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.695858] env[69027]: INFO nova.virt.vmwareapi.vm_util [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Created folder: Instances in parent group-v677430. [ 2249.696086] env[69027]: DEBUG oslo.service.loopingcall [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2249.696260] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2249.696442] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1797ade2-0efd-4322-9aec-44cbf582ebcb {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.714803] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2249.714803] env[69027]: value = "task-3395288" [ 2249.714803] env[69027]: _type = "Task" [ 2249.714803] env[69027]: } to complete. 
{{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2249.721674] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395288, 'name': CreateVM_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.224511] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395288, 'name': CreateVM_Task, 'duration_secs': 0.282968} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2250.224691] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2250.225460] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2250.225630] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2250.225937] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2250.226262] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11030737-ebc7-4747-ac08-9ccd388bb809 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2250.231151] env[69027]: DEBUG oslo_vmware.api [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Waiting for the task: (returnval){ [ 2250.231151] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]527132c0-fbbe-e6bd-f147-9c64b98fc1ab" [ 2250.231151] env[69027]: _type = "Task" [ 2250.231151] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2250.241272] env[69027]: DEBUG oslo_vmware.api [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]527132c0-fbbe-e6bd-f147-9c64b98fc1ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2250.740845] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2250.741221] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2250.741318] env[69027]: DEBUG oslo_concurrency.lockutils [None req-95505ace-a4c2-4eba-bb76-0dc09e295548 tempest-ServerTagsTestJSON-866648544 tempest-ServerTagsTestJSON-866648544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2251.330962] env[69027]: DEBUG nova.compute.manager [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Received event network-changed-462d3b8b-f254-4b3a-aec4-e917af482ff7 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2251.331195] env[69027]: DEBUG nova.compute.manager [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Refreshing instance network info cache due to event network-changed-462d3b8b-f254-4b3a-aec4-e917af482ff7. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2251.331410] env[69027]: DEBUG oslo_concurrency.lockutils [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] Acquiring lock "refresh_cache-fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2251.331555] env[69027]: DEBUG oslo_concurrency.lockutils [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] Acquired lock "refresh_cache-fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2251.331716] env[69027]: DEBUG nova.network.neutron [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Refreshing network info cache for port 462d3b8b-f254-4b3a-aec4-e917af482ff7 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2251.593453] env[69027]: DEBUG nova.network.neutron [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Updated VIF entry in instance network info cache for port 462d3b8b-f254-4b3a-aec4-e917af482ff7. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2251.593809] env[69027]: DEBUG nova.network.neutron [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Updating instance_info_cache with network_info: [{"id": "462d3b8b-f254-4b3a-aec4-e917af482ff7", "address": "fa:16:3e:bc:6b:4c", "network": {"id": "2cc76786-2da0-422a-9911-dd5b20923794", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-812059931-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6037bebe102b4bb2a9e32f2770732193", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9732690c-bdcf-4e6f-9a32-42c196333eb8", "external-id": "nsx-vlan-transportzone-548", "segmentation_id": 548, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap462d3b8b-f2", "ovs_interfaceid": "462d3b8b-f254-4b3a-aec4-e917af482ff7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2251.603579] env[69027]: DEBUG oslo_concurrency.lockutils [req-399098af-cdad-4757-b383-2416afa4451b req-31a6497c-897b-4f4c-bb2f-cf4197c813a9 service nova] Releasing lock "refresh_cache-fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2259.423645] env[69027]: WARNING oslo_vmware.rw_handles [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2259.423645] env[69027]: ERROR oslo_vmware.rw_handles [ 2259.424400] env[69027]: DEBUG nova.virt.vmwareapi.images [None 
req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2259.426125] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2259.426386] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Copying Virtual Disk [datastore2] vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/b04dc403-97f9-4bcc-99e0-bffd09ea07f4/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2259.426668] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07f5469c-e795-4d87-afba-e2f7bb53b9a2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.434668] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Waiting for the task: (returnval){ [ 2259.434668] env[69027]: value = "task-3395289" [ 2259.434668] env[69027]: _type = "Task" [ 2259.434668] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.442010] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Task: {'id': task-3395289, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.944987] env[69027]: DEBUG oslo_vmware.exceptions [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2259.945298] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2259.945857] env[69027]: ERROR nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2259.945857] env[69027]: Faults: ['InvalidArgument'] [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Traceback (most recent call last): [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] yield resources [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self.driver.spawn(context, instance, image_meta, [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self._fetch_image_if_missing(context, vi) [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] image_cache(vi, tmp_image_ds_loc) [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] vm_util.copy_virtual_disk( [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] session._wait_for_task(vmdk_copy_task) [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] return self.wait_for_task(task_ref) [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] return evt.wait() [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] result = hub.switch() [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] return self.greenlet.switch() [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self.f(*self.args, **self.kw) [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] raise exceptions.translate_fault(task_info.error) [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Faults: ['InvalidArgument'] [ 2259.945857] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] [ 2259.946853] env[69027]: INFO nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Terminating instance [ 2259.947735] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2259.947945] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2259.948192] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-7a4c4019-b843-4240-aeca-fac0ed7a1308 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.950155] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2259.950321] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquired lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2259.950488] env[69027]: DEBUG nova.network.neutron [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2259.956981] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2259.957204] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2259.958390] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-745672d1-6718-4179-a30d-506d51993e44 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.965499] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Waiting for the task: (returnval){ [ 2259.965499] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]529d2ed1-61b1-5169-45d2-15f3ace2572d" [ 2259.965499] env[69027]: _type = "Task" [ 2259.965499] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.972621] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]529d2ed1-61b1-5169-45d2-15f3ace2572d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.979410] env[69027]: DEBUG nova.network.neutron [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2260.042675] env[69027]: DEBUG nova.network.neutron [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2260.052847] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Releasing lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.053252] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2260.053448] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2260.054535] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc1016a-65d7-4b4d-a67d-ff753bd44e6d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.062182] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2260.062397] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e93b6b78-148d-4809-be16-c7bfed5cde67 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.090526] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2260.090718] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Deleting contents of the VM from datastore datastore2 {{(pid=69027) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2260.090894] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Deleting the datastore file [datastore2] e379ca79-9458-464d-b07f-f651e474ebd7 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2260.091143] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f489881c-d75e-4388-802a-9298047ee43a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.096731] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Waiting for the task: (returnval){ [ 2260.096731] env[69027]: value = "task-3395291" [ 2260.096731] env[69027]: _type = "Task" [ 2260.096731] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2260.104607] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Task: {'id': task-3395291, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.476240] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2260.476511] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Creating directory with path [datastore2] vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2260.476738] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2db70e9-28b4-4e8a-9542-bbc1e746c2dc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.488042] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Created directory with path [datastore2] vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2260.488237] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Fetch image to [datastore2] vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2260.488410] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2260.489134] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5c515e-19d4-49ce-83e8-4fb3b14823c9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.495716] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd60410b-e00f-4153-af96-3f27b08e849b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.504539] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b571cf5-9527-4024-847c-0d2bfdaa7ca7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.536790] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a1a842-1f93-49ae-9ce2-bd84826a2e2d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.542402] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a8cc6bab-090e-4fee-972b-c6aaf9b229f8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.567127] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2260.605819] env[69027]: DEBUG oslo_vmware.api [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Task: {'id': task-3395291, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039509} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.606080] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2260.606415] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2260.606616] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2260.607197] env[69027]: INFO nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Took 0.55 seconds to destroy the instance on the hypervisor. [ 2260.607495] env[69027]: DEBUG oslo.service.loopingcall [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2260.607718] env[69027]: DEBUG nova.compute.manager [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2260.609741] env[69027]: DEBUG nova.compute.claims [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2260.609918] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2260.610145] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2260.614870] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2260.675679] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2260.675757] env[69027]: DEBUG oslo_vmware.rw_handles [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2260.771966] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac20b8d-b7e3-468f-a394-7aeebc631ac9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.779370] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47530760-932d-4567-baf6-c1c79738fc13 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.808270] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0954c9f3-a2ea-4334-b6b4-4fcd863f2693 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.814908] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9075b6-ca93-4cf2-a7aa-602175e4041c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.828323] env[69027]: DEBUG nova.compute.provider_tree [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2260.836914] env[69027]: DEBUG nova.scheduler.client.report [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2260.850888] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.241s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.851468] env[69027]: ERROR nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2260.851468] env[69027]: Faults: ['InvalidArgument'] [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Traceback (most recent call last): [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2260.851468] env[69027]: ERROR 
nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self.driver.spawn(context, instance, image_meta, [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self._fetch_image_if_missing(context, vi) [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] image_cache(vi, tmp_image_ds_loc) [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] vm_util.copy_virtual_disk( [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] session._wait_for_task(vmdk_copy_task) [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] return self.wait_for_task(task_ref) [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] return evt.wait() [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] result = hub.switch() [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] return self.greenlet.switch() [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] self.f(*self.args, **self.kw) [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] raise exceptions.translate_fault(task_info.error) [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Faults: ['InvalidArgument'] [ 2260.851468] env[69027]: ERROR nova.compute.manager [instance: e379ca79-9458-464d-b07f-f651e474ebd7] [ 2260.852296] env[69027]: DEBUG nova.compute.utils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2260.853537] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Build of instance e379ca79-9458-464d-b07f-f651e474ebd7 was re-scheduled: A specified parameter was not correct: fileType [ 2260.853537] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2260.853909] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2260.854175] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.854331] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquired lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.854492] env[69027]: DEBUG nova.network.neutron [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2260.877114] env[69027]: DEBUG nova.network.neutron [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2260.933181] env[69027]: DEBUG nova.network.neutron [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2260.945031] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Releasing lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.945031] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2260.945290] env[69027]: DEBUG nova.compute.manager [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Skipping network deallocation for instance since networking was not requested. {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2261.026722] env[69027]: INFO nova.scheduler.client.report [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Deleted allocations for instance e379ca79-9458-464d-b07f-f651e474ebd7 [ 2261.045280] env[69027]: DEBUG oslo_concurrency.lockutils [None req-ea334d79-450b-48c6-bf49-9662e68fcd96 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "e379ca79-9458-464d-b07f-f651e474ebd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 493.733s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.045531] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "e379ca79-9458-464d-b07f-f651e474ebd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 297.810s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.045740] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "e379ca79-9458-464d-b07f-f651e474ebd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.045949] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock 
"e379ca79-9458-464d-b07f-f651e474ebd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.046126] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "e379ca79-9458-464d-b07f-f651e474ebd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.047926] env[69027]: INFO nova.compute.manager [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Terminating instance [ 2261.049463] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquiring lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2261.049619] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Acquired lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2261.049784] env[69027]: DEBUG nova.network.neutron [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2261.075401] env[69027]: DEBUG nova.network.neutron [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance cache missing network info. 
{{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2261.131407] env[69027]: DEBUG nova.network.neutron [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2261.140115] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Releasing lock "refresh_cache-e379ca79-9458-464d-b07f-f651e474ebd7" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2261.140498] env[69027]: DEBUG nova.compute.manager [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2261.140690] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2261.141200] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4213b5e1-38e0-4dd0-89e2-7df9915aa7b3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.149986] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87480796-43f8-4516-99be-585c513bf1b0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.176631] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e379ca79-9458-464d-b07f-f651e474ebd7 could not be found. [ 2261.176821] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2261.176997] env[69027]: INFO nova.compute.manager [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2261.177251] env[69027]: DEBUG oslo.service.loopingcall [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2261.177460] env[69027]: DEBUG nova.compute.manager [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2261.177556] env[69027]: DEBUG nova.network.neutron [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2261.193957] env[69027]: DEBUG nova.network.neutron [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2261.201562] env[69027]: DEBUG nova.network.neutron [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2261.209493] env[69027]: INFO nova.compute.manager [-] [instance: e379ca79-9458-464d-b07f-f651e474ebd7] Took 0.03 seconds to deallocate network for instance. [ 2261.291473] env[69027]: DEBUG oslo_concurrency.lockutils [None req-59ba8273-8778-4799-ba2e-806857dc36c8 tempest-ServersListShow296Test-264880002 tempest-ServersListShow296Test-264880002-project-member] Lock "e379ca79-9458-464d-b07f-f651e474ebd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.246s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2290.771594] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2290.771891] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2290.771927] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2290.791833] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2290.791991] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2290.792144] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2290.792285] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2290.792409] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2290.792529] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2290.792654] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2290.792774] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2295.771443] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2297.767476] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2298.771811] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2298.782797] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2298.783027] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2298.783209] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2298.783365] 
env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2298.784902] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08372170-5ecb-4eaf-b9de-8ccf52473282 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.793513] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de36afa1-3158-4913-b021-85d4e537cdfa {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.807364] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d6603d-ec6c-4bc6-b6ef-c470a583e62e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.813413] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4560d65a-dfe1-44ae-bbf7-d4b36a191bd3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.842007] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180943MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2298.842176] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2298.842368] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2298.906558] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2298.906718] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2298.906848] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2298.906970] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2298.907107] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c78bd74b-1d1b-46bc-9fd8-a553f23e6671 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2298.907230] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a1088215-4676-47d3-9df6-e835d32f4b5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2298.907347] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fc594297-bbf9-4ea4-82c6-a709b4e0c3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2298.907529] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2298.907665] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2298.994346] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cc2dec-74f8-4735-8287-346264aba328 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.003120] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b277d957-e16d-4550-9f89-1036af5a6213 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.032530] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47dcc779-f9a7-451c-9287-831ea4b8bc42 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.039455] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7aae737-1443-401e-bd47-ea7917028dc0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2299.053020] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2299.060102] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2299.073798] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2299.073986] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.232s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2300.073944] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.771457] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2300.771698] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2301.771541] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2301.771843] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2301.771949] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2307.416871] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "647c0734-c3ed-47eb-807a-e8034e5378f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2307.417177] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "647c0734-c3ed-47eb-807a-e8034e5378f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2307.427382] env[69027]: DEBUG nova.compute.manager [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Starting instance... 
{{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2307.477034] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2307.477285] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2307.478647] env[69027]: INFO nova.compute.claims [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2307.618555] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fea1527-465d-4467-baf9-c55480a6cba3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.626428] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79ac14c-bb70-426d-8503-9802c389465f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.656421] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e46e90-f473-44fe-8a78-1ebc73661cf7 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.663508] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8d9a50-e99c-4bda-a0e0-fd6e51c68e6b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.676988] env[69027]: DEBUG nova.compute.provider_tree [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2307.685843] env[69027]: DEBUG nova.scheduler.client.report [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2307.699766] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 
tempest-ServersTestJSON-2138845674-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.222s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2307.700229] env[69027]: DEBUG nova.compute.manager [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Start building networks asynchronously for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2307.731861] env[69027]: DEBUG nova.compute.utils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Using /dev/sd instead of None {{(pid=69027) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2307.733367] env[69027]: DEBUG nova.compute.manager [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Allocating IP information in the background. {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2307.733547] env[69027]: DEBUG nova.network.neutron [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] allocate_for_instance() {{(pid=69027) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2307.741682] env[69027]: DEBUG nova.compute.manager [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Start building block device mappings for instance. {{(pid=69027) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2307.804361] env[69027]: DEBUG nova.compute.manager [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Start spawning the instance on the hypervisor. 
{{(pid=69027) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2307.832223] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-10T13:32:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-10T13:32:02Z,direct_url=,disk_format='vmdk',id=1f242793-8cbc-47db-8e09-30ca2e488bdf,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='baa826edead146bab87cd5ad749bedb8',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-10T13:32:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2307.832604] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2307.832727] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image limits 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2307.832871] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Flavor pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2307.833033] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Image pref 0:0:0 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2307.833188] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69027) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2307.833389] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2307.833548] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2307.833714] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 
tempest-ServersTestJSON-2138845674-project-member] Got 1 possible topologies {{(pid=69027) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2307.833881] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2307.834130] env[69027]: DEBUG nova.virt.hardware [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69027) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2307.834999] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcf055c-f968-44e6-b6cd-4c879e9922f4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.838828] env[69027]: DEBUG nova.policy [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9cae5394177466e9afb1f8fa26e15ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ed36a72c2994c47a7313f7bbb37640a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69027) authorize /opt/stack/nova/nova/policy.py:203}} [ 2307.847333] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3f273b-1f4a-417d-a95b-a08a2997a8c8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.321118] env[69027]: DEBUG nova.network.neutron [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Successfully created port: ea057a5c-7e4b-4d84-9ca7-b731bba48b85 {{(pid=69027) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2308.482354] env[69027]: WARNING oslo_vmware.rw_handles [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2308.482354] env[69027]: ERROR 
oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2308.482354] env[69027]: ERROR oslo_vmware.rw_handles [ 2308.482936] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2308.487023] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2308.487023] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Copying Virtual Disk [datastore2] vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/2c4ebe72-7c7e-49c7-9e7f-1c2d90a0ff6a/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2308.487023] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0ec8d1c-3e10-4c34-ab85-dcde2af78eb4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.494241] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Waiting for the task: (returnval){ [ 2308.494241] env[69027]: value = "task-3395292" [ 2308.494241] env[69027]: _type = "Task" [ 2308.494241] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2308.502440] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Task: {'id': task-3395292, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.904616] env[69027]: DEBUG nova.compute.manager [req-74fbd3fb-083e-49cd-b9ee-870a12b9bc09 req-d76464d1-821c-47e8-8d6b-6cd975d671c3 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Received event network-vif-plugged-ea057a5c-7e4b-4d84-9ca7-b731bba48b85 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2308.904867] env[69027]: DEBUG oslo_concurrency.lockutils [req-74fbd3fb-083e-49cd-b9ee-870a12b9bc09 req-d76464d1-821c-47e8-8d6b-6cd975d671c3 service nova] Acquiring lock "647c0734-c3ed-47eb-807a-e8034e5378f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2308.905095] env[69027]: DEBUG oslo_concurrency.lockutils [req-74fbd3fb-083e-49cd-b9ee-870a12b9bc09 req-d76464d1-821c-47e8-8d6b-6cd975d671c3 service nova] Lock "647c0734-c3ed-47eb-807a-e8034e5378f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2308.905271] env[69027]: DEBUG oslo_concurrency.lockutils [req-74fbd3fb-083e-49cd-b9ee-870a12b9bc09 req-d76464d1-821c-47e8-8d6b-6cd975d671c3 service nova] Lock "647c0734-c3ed-47eb-807a-e8034e5378f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2308.905447] env[69027]: DEBUG nova.compute.manager [req-74fbd3fb-083e-49cd-b9ee-870a12b9bc09 req-d76464d1-821c-47e8-8d6b-6cd975d671c3 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] No waiting events found dispatching network-vif-plugged-ea057a5c-7e4b-4d84-9ca7-b731bba48b85 {{(pid=69027) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2308.905611] env[69027]: WARNING nova.compute.manager [req-74fbd3fb-083e-49cd-b9ee-870a12b9bc09 req-d76464d1-821c-47e8-8d6b-6cd975d671c3 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Received unexpected event network-vif-plugged-ea057a5c-7e4b-4d84-9ca7-b731bba48b85 for instance with vm_state building and task_state spawning. 
[ 2308.959212] env[69027]: DEBUG nova.network.neutron [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Successfully updated port: ea057a5c-7e4b-4d84-9ca7-b731bba48b85 {{(pid=69027) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2308.970541] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "refresh_cache-647c0734-c3ed-47eb-807a-e8034e5378f1" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2308.970669] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "refresh_cache-647c0734-c3ed-47eb-807a-e8034e5378f1" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2308.970783] env[69027]: DEBUG nova.network.neutron [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Building network info cache for instance {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2309.004938] env[69027]: DEBUG oslo_vmware.exceptions [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2309.005233] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2309.005837] env[69027]: ERROR nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2309.005837] env[69027]: Faults: ['InvalidArgument'] [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Traceback (most recent call last): [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] yield resources [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self.driver.spawn(context, instance, image_meta, [ 2309.005837] 
env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self._fetch_image_if_missing(context, vi) [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] image_cache(vi, tmp_image_ds_loc) [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] vm_util.copy_virtual_disk( [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] session._wait_for_task(vmdk_copy_task) [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] return self.wait_for_task(task_ref) [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] return evt.wait() [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] result = hub.switch() [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] return self.greenlet.switch() [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self.f(*self.args, **self.kw) [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2309.005837] env[69027]: ERROR nova.compute.manager 
[instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] raise exceptions.translate_fault(task_info.error) [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Faults: ['InvalidArgument'] [ 2309.005837] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] [ 2309.007466] env[69027]: INFO nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Terminating instance [ 2309.007601] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2309.007821] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2309.009689] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65c6424d-c504-4234-b5d9-25425c5f3aa1 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.011376] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2309.011560] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2309.012557] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744f668f-d9f6-4385-9e2c-2d3d2c1d6a7e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.024081] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2309.024270] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2309.025011] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2309.025236] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdd71bf2-42fd-4b92-9bf8-b76066b1bbe0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.027289] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f38e6f41-ba88-4310-9d25-6ad436765dbf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.031364] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 2309.031364] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]521c191f-7694-22ad-68f9-0bb09864eb99" [ 2309.031364] env[69027]: _type = "Task" [ 2309.031364] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.036498] env[69027]: DEBUG nova.network.neutron [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Instance cache missing network info. {{(pid=69027) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2309.041076] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]521c191f-7694-22ad-68f9-0bb09864eb99, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.107704] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2309.108413] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2309.108413] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Deleting the datastore file [datastore2] 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2309.108619] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e277140-b106-4ad5-9585-aab776c3f401 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.115483] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Waiting for the task: (returnval){ [ 2309.115483] env[69027]: value = "task-3395294" [ 2309.115483] env[69027]: _type = "Task" [ 2309.115483] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.126062] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Task: {'id': task-3395294, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.208729] env[69027]: DEBUG nova.network.neutron [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Updating instance_info_cache with network_info: [{"id": "ea057a5c-7e4b-4d84-9ca7-b731bba48b85", "address": "fa:16:3e:f0:92:38", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea057a5c-7e", "ovs_interfaceid": "ea057a5c-7e4b-4d84-9ca7-b731bba48b85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2309.221951] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "refresh_cache-647c0734-c3ed-47eb-807a-e8034e5378f1" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2309.222240] env[69027]: DEBUG nova.compute.manager [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Instance network_info: |[{"id": "ea057a5c-7e4b-4d84-9ca7-b731bba48b85", "address": "fa:16:3e:f0:92:38", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea057a5c-7e", "ovs_interfaceid": "ea057a5c-7e4b-4d84-9ca7-b731bba48b85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69027) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
2309.222648] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:92:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8f580e6-1d86-41ee-9ebe-c531cb9299c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea057a5c-7e4b-4d84-9ca7-b731bba48b85', 'vif_model': 'vmxnet3'}] {{(pid=69027) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2309.230389] env[69027]: DEBUG oslo.service.loopingcall [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2309.230852] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Creating VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2309.231117] env[69027]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68fc3e4d-bdc6-4f5a-9cf9-4e7c2301fbb9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.251424] env[69027]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2309.251424] env[69027]: value = "task-3395295" [ 2309.251424] env[69027]: _type = "Task" [ 2309.251424] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.258791] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395295, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.542552] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2309.542904] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating directory with path [datastore2] vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2309.543077] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74b0f2d2-40ee-457c-93c8-e77a4eb85d0a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.553622] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Created directory with path [datastore2] vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2309.553796] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Fetch image to [datastore2] vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2309.553966] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2309.554748] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedb3427-b6dd-42f6-8242-2b0facb44aa9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.561100] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3518052-94d3-4764-a65b-19e19cb063c2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.570147] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86d7858-88c9-4637-9aeb-54aa34277f9d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.599895] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbf986d-39f0-4ff7-b07b-3dd49a4854e6 {{(pid=69027) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.605241] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fa8bbe9e-b0f7-4527-bfc3-7dde761a6c95 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.624131] env[69027]: DEBUG oslo_vmware.api [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Task: {'id': task-3395294, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070021} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.625561] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2309.625762] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2309.625946] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2309.626138] env[69027]: INFO nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Took 0.61 seconds to destroy the instance on the hypervisor. 
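The traceback and task polling above follow oslo.vmware's wait-for-task pattern: the driver submits a task (here CopyVirtualDisk_Task), _poll_task re-checks task_info on an interval (the "progress is 0%" lines), and a failed task is re-raised as a translated VimFaultException, in this case "A specified parameter was not correct: fileType" with Faults: ['InvalidArgument']. Below is a minimal sketch of how a caller can wrap that pattern; wait_for_copy and its error handling are illustrative only, not Nova's actual code, and the session/task objects are assumed to come from an existing oslo_vmware.api.VMwareAPISession.

from oslo_vmware import exceptions as vexc

def wait_for_copy(session, copy_task):
    # Illustrative wrapper (not Nova code): `session` is assumed to be an
    # already-authenticated oslo_vmware.api.VMwareAPISession, and `copy_task`
    # a task reference returned by a CopyVirtualDisk_Task invocation.
    try:
        # wait_for_task() polls the task and raises a translated fault
        # when task_info.error is set, as seen in the traceback above.
        return session.wait_for_task(copy_task)
    except vexc.VimFaultException as exc:
        # The failure above carries Faults: ['InvalidArgument'] and the
        # message "A specified parameter was not correct: fileType".
        if 'InvalidArgument' in (getattr(exc, 'fault_list', None) or []):
            raise RuntimeError(
                "CopyVirtualDisk rejected the disk spec: %s" % exc) from exc
        raise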
[ 2309.627927] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2309.630593] env[69027]: DEBUG nova.compute.claims [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2309.630593] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.630593] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.677519] env[69027]: DEBUG oslo_vmware.rw_handles [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2309.737678] env[69027]: DEBUG oslo_vmware.rw_handles [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2309.737870] env[69027]: DEBUG oslo_vmware.rw_handles [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2309.761275] env[69027]: DEBUG oslo_vmware.api [-] Task: {'id': task-3395295, 'name': CreateVM_Task, 'duration_secs': 0.263221} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.761439] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Created VM on the ESX host {{(pid=69027) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2309.762079] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2309.762251] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2309.762551] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2309.762812] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f87b006-7a73-4e26-99d7-e3935e9ddb9d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.766802] env[69027]: DEBUG oslo_vmware.api [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 2309.766802] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52e41be5-b6df-8f28-f7a1-f5da27b967d0" [ 2309.766802] env[69027]: _type = "Task" [ 2309.766802] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.775880] env[69027]: DEBUG oslo_vmware.api [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52e41be5-b6df-8f28-f7a1-f5da27b967d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.817052] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5633c4-47c3-4df4-93ba-c8022581c4f8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.823324] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8a9068-fb83-4fe8-81ba-f373098381b4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.853597] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a70db5e-d2c4-4fb7-a496-23081ac15895 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.860138] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13f874b-72ca-4f5a-bad9-0b76c8c21827 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.872609] env[69027]: DEBUG nova.compute.provider_tree [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2309.880994] env[69027]: DEBUG nova.scheduler.client.report [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2309.894242] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2309.894777] env[69027]: ERROR nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2309.894777] env[69027]: Faults: ['InvalidArgument'] [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Traceback (most recent call last): [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File 
"/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self.driver.spawn(context, instance, image_meta, [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self._fetch_image_if_missing(context, vi) [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] image_cache(vi, tmp_image_ds_loc) [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] vm_util.copy_virtual_disk( [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] session._wait_for_task(vmdk_copy_task) [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] return self.wait_for_task(task_ref) [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] return evt.wait() [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] result = hub.switch() [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] return self.greenlet.switch() [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] self.f(*self.args, **self.kw) [ 2309.894777] 
env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] raise exceptions.translate_fault(task_info.error) [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Faults: ['InvalidArgument'] [ 2309.894777] env[69027]: ERROR nova.compute.manager [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] [ 2309.895517] env[69027]: DEBUG nova.compute.utils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2309.896829] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Build of instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 was re-scheduled: A specified parameter was not correct: fileType [ 2309.896829] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2309.897210] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2309.897384] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2309.897568] env[69027]: DEBUG nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2309.897733] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2310.232643] env[69027]: DEBUG nova.network.neutron [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2310.245767] env[69027]: INFO nova.compute.manager [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Took 0.35 seconds to deallocate network for instance. [ 2310.279371] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2310.279717] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Processing image 1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2310.279944] env[69027]: DEBUG oslo_concurrency.lockutils [None req-15fe1337-9c06-4552-8302-0bbb1f9b58a0 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2310.336730] env[69027]: INFO nova.scheduler.client.report [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Deleted allocations for instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 [ 2310.357242] env[69027]: DEBUG oslo_concurrency.lockutils [None req-c5924521-fb2d-46fa-b086-e17546fc383f tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 514.498s {{(pid=69027) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.357242] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 318.093s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.357438] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Acquiring lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.357638] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2310.357811] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.359789] env[69027]: INFO nova.compute.manager [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Terminating instance [ 2310.361745] env[69027]: DEBUG nova.compute.manager [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Start destroying the instance on the hypervisor. 
{{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2310.361942] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2310.362211] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26c053f2-f0c8-4e98-a5d2-1e841e405091 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.372484] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5defbf-a0c9-409c-bf93-c97e5c0527d9 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.401014] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9f12e2a1-852b-4d55-8e38-ddeb9adb3053 could not be found. [ 2310.401258] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2310.401489] env[69027]: INFO nova.compute.manager [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2310.401774] env[69027]: DEBUG oslo.service.loopingcall [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2310.402063] env[69027]: DEBUG nova.compute.manager [-] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2310.402192] env[69027]: DEBUG nova.network.neutron [-] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2310.428445] env[69027]: DEBUG nova.network.neutron [-] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2310.436805] env[69027]: INFO nova.compute.manager [-] [instance: 9f12e2a1-852b-4d55-8e38-ddeb9adb3053] Took 0.03 seconds to deallocate network for instance. 
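The "Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs" lines throughout this section come from oslo.concurrency's lockutils wrappers around per-instance and "compute_resources" critical sections. A minimal sketch of that pattern follows; guard_instance and do_work are illustrative names, not Nova's own helpers, and the lock here is the default in-process variant.

import time

from oslo_concurrency import lockutils

def guard_instance(instance_uuid, do_work):
    # Illustrative only: serialize work on one instance the way the compute
    # manager serializes build/terminate on "<uuid>"-named locks.
    start = time.monotonic()
    with lockutils.lock(instance_uuid):    # internal (in-process) lock
        acquired = time.monotonic()        # maps to "waited N.NNNs" in the log
        result = do_work()
    released = time.monotonic()            # maps to "held N.NNNs" in the log
    return result, acquired - start, released - acquired

Calling guard_instance(uuid, some_callable) from two greenthreads reproduces the waited/held accounting shown in the entries above: the second caller's "waited" grows while the first caller's "held" interval is still open.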
[ 2310.516980] env[69027]: DEBUG oslo_concurrency.lockutils [None req-e7775781-6fa4-4de8-be40-df02fb345417 tempest-ServersNegativeTestMultiTenantJSON-1011173409 tempest-ServersNegativeTestMultiTenantJSON-1011173409-project-member] Lock "9f12e2a1-852b-4d55-8e38-ddeb9adb3053" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2310.934814] env[69027]: DEBUG nova.compute.manager [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Received event network-changed-ea057a5c-7e4b-4d84-9ca7-b731bba48b85 {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2310.935090] env[69027]: DEBUG nova.compute.manager [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Refreshing instance network info cache due to event network-changed-ea057a5c-7e4b-4d84-9ca7-b731bba48b85. {{(pid=69027) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2310.935279] env[69027]: DEBUG oslo_concurrency.lockutils [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] Acquiring lock "refresh_cache-647c0734-c3ed-47eb-807a-e8034e5378f1" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2310.935423] env[69027]: DEBUG oslo_concurrency.lockutils [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] Acquired lock "refresh_cache-647c0734-c3ed-47eb-807a-e8034e5378f1" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2310.935585] env[69027]: DEBUG nova.network.neutron [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Refreshing network info cache for port ea057a5c-7e4b-4d84-9ca7-b731bba48b85 {{(pid=69027) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2311.257616] env[69027]: DEBUG nova.network.neutron [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Updated VIF entry in instance network info cache for port ea057a5c-7e4b-4d84-9ca7-b731bba48b85. 
{{(pid=69027) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2311.257616] env[69027]: DEBUG nova.network.neutron [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Updating instance_info_cache with network_info: [{"id": "ea057a5c-7e4b-4d84-9ca7-b731bba48b85", "address": "fa:16:3e:f0:92:38", "network": {"id": "fa0ff98c-2cb4-4063-83a0-e81a33114512", "bridge": "br-int", "label": "tempest-ServersTestJSON-484934499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed36a72c2994c47a7313f7bbb37640a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8f580e6-1d86-41ee-9ebe-c531cb9299c6", "external-id": "nsx-vlan-transportzone-150", "segmentation_id": 150, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea057a5c-7e", "ovs_interfaceid": "ea057a5c-7e4b-4d84-9ca7-b731bba48b85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2311.266960] env[69027]: DEBUG oslo_concurrency.lockutils [req-4251c3af-d0d6-491c-9ba6-5d704831370a req-37bc7864-223a-402e-85f5-58d974aca7a7 service nova] Releasing lock "refresh_cache-647c0734-c3ed-47eb-807a-e8034e5378f1" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2342.044761] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2342.045268] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Getting list of instances from cluster (obj){ [ 2342.045268] env[69027]: value = "domain-c8" [ 2342.045268] env[69027]: _type = "ClusterComputeResource" [ 2342.045268] env[69027]: } {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2342.046327] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b9586c-a21e-4e7d-b8ec-5a88b641532f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.060521] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Got total of 7 instances {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2347.056737] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2347.075335] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Getting list of 
instances from cluster (obj){ [ 2347.075335] env[69027]: value = "domain-c8" [ 2347.075335] env[69027]: _type = "ClusterComputeResource" [ 2347.075335] env[69027]: } {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2347.076914] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044d81b5-3d10-4778-84cf-7866376fac4c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.091027] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Got total of 7 instances {{(pid=69027) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2347.091203] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid f069ae93-e79f-4c89-99b8-f3ee70895ee6 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2347.091433] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2347.091644] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2347.091812] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid c78bd74b-1d1b-46bc-9fd8-a553f23e6671 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2347.091968] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid a1088215-4676-47d3-9df6-e835d32f4b5a {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2347.092136] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid fc594297-bbf9-4ea4-82c6-a709b4e0c3f8 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2347.092288] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Triggering sync for uuid 647c0734-c3ed-47eb-807a-e8034e5378f1 {{(pid=69027) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2347.092594] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.092826] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.093037] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "22e27e0c-3cac-4794-b53a-4df7b8b92ec9" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.093250] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "c78bd74b-1d1b-46bc-9fd8-a553f23e6671" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.093441] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "a1088215-4676-47d3-9df6-e835d32f4b5a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.093637] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "fc594297-bbf9-4ea4-82c6-a709b4e0c3f8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2347.093833] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "647c0734-c3ed-47eb-807a-e8034e5378f1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2352.808610] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2352.808918] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2352.808918] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2352.825402] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2352.825573] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2352.825701] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2352.825829] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2352.825951] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2352.826087] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2352.826211] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2352.826367] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2356.772204] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.458059] env[69027]: WARNING oslo_vmware.rw_handles [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2359.458059] env[69027]: ERROR oslo_vmware.rw_handles [ 2359.458059] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 
tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2359.460272] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2359.460543] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Copying Virtual Disk [datastore2] vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/daa01dd0-c394-417a-9643-991eb0758a26/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2359.460870] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee256241-0ac2-47e4-8183-f5f603accbf8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.470119] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 2359.470119] env[69027]: value = "task-3395296" [ 2359.470119] env[69027]: _type = "Task" [ 2359.470119] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2359.478874] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': task-3395296, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2359.766866] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.770450] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.782020] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.782249] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.782415] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2359.782588] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2359.783660] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8c9527-a219-40fe-b6da-f6a406a9995a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.792176] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e91bde-d8c1-472d-ae26-140ba70ae468 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.805596] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf596dd-8443-4bca-b065-015a6a0ca11a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.811638] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d917c2-f832-47a7-878b-de70656238d8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.840582] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180957MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2359.840723] env[69027]: 
DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2359.840911] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2359.965422] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2359.965601] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2359.965774] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2359.965905] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c78bd74b-1d1b-46bc-9fd8-a553f23e6671 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2359.966034] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a1088215-4676-47d3-9df6-e835d32f4b5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2359.966157] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fc594297-bbf9-4ea4-82c6-a709b4e0c3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2359.966272] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 647c0734-c3ed-47eb-807a-e8034e5378f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2359.966474] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2359.966611] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2359.982016] env[69027]: DEBUG oslo_vmware.exceptions [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Fault InvalidArgument not matched. {{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2359.984303] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2359.984856] env[69027]: ERROR nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2359.984856] env[69027]: Faults: ['InvalidArgument'] [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Traceback (most recent call last): [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] yield resources [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self.driver.spawn(context, instance, image_meta, [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self._fetch_image_if_missing(context, vi) [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] image_cache(vi, tmp_image_ds_loc) [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] vm_util.copy_virtual_disk( [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] session._wait_for_task(vmdk_copy_task) [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] return self.wait_for_task(task_ref) [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] return evt.wait() [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] result = hub.switch() [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] return self.greenlet.switch() [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self.f(*self.args, **self.kw) [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] raise exceptions.translate_fault(task_info.error) [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Faults: ['InvalidArgument'] [ 2359.984856] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] [ 2359.986116] env[69027]: INFO nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: 
f069ae93-e79f-4c89-99b8-f3ee70895ee6] Terminating instance [ 2359.986756] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2359.986971] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2359.987217] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1297f4f7-f264-44cc-93f6-1b56aaaad43b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.989244] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2359.989444] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2359.990160] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82811516-b231-413c-ad1b-7581b4b2c90e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.996955] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2359.999168] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea054127-11ab-4420-9b80-a00173243bdf {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.001303] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2360.001477] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2360.002392] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ad86530-01a2-43e7-b69b-c79e4edc0fd6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.007015] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 2360.007015] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52f6e1c7-1592-b8b1-76dc-02585f28ea76" [ 2360.007015] env[69027]: _type = "Task" [ 2360.007015] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.017184] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52f6e1c7-1592-b8b1-76dc-02585f28ea76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.062498] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ed568e-ac22-475b-8af9-58061328f846 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.066247] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2360.066455] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2360.066637] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Deleting the datastore file [datastore2] f069ae93-e79f-4c89-99b8-f3ee70895ee6 {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2360.067206] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17114052-00e6-4151-bdf5-65858410721f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.071700] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b11cc0a-bc00-4b4f-9ff2-9df4ebe57804 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.075435] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for the task: (returnval){ [ 2360.075435] env[69027]: value = "task-3395298" [ 
2360.075435] env[69027]: _type = "Task" [ 2360.075435] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.104048] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e262ebd-014f-49a4-a2c9-86f32807d87c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.108504] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': task-3395298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.113838] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c906d63-c2f0-4345-8dc2-8d730d94a47e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.126722] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2360.135568] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2360.152313] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2360.152531] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.312s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2360.517373] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2360.517802] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating directory with path [datastore2] vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2360.517967] env[69027]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eece46f9-2bd2-45b9-a678-cd6c2710eafd {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.528831] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Created directory with path [datastore2] vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2360.529035] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Fetch image to [datastore2] vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2360.529219] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2360.529909] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7115ea-08f9-4b22-b40f-8088ce7d8d41 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.536580] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52793ff8-6057-4972-96ff-2dd517b56174 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.545424] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbede532-d672-499f-a8a6-4fbd443fccb3 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.574926] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a09d3c-03ca-4925-9005-24ecc298435f {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.586537] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-818361c8-c95b-475c-aea0-fb30ed309078 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.588198] env[69027]: DEBUG oslo_vmware.api [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Task: {'id': task-3395298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073434} completed successfully. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2360.588440] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2360.588623] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2360.588792] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2360.588965] env[69027]: INFO nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Took 0.60 seconds to destroy the instance on the hypervisor. [ 2360.591014] env[69027]: DEBUG nova.compute.claims [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2360.591206] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2360.591421] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2360.619083] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2360.670349] env[69027]: DEBUG oslo_vmware.rw_handles [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2360.730202] env[69027]: DEBUG oslo_vmware.rw_handles [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2360.730404] env[69027]: DEBUG oslo_vmware.rw_handles [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2360.788244] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0f86c4-10bb-4b77-8b1a-433662b630a4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.795504] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb7fd7a-13d4-4179-b701-f0e56cfe0e81 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.824121] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9e91e5-a5ec-4d81-aa85-b98eb4e06f1a {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.830653] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8778ec95-54a9-4fe9-9c5b-b448a47c2b6c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.842932] env[69027]: DEBUG nova.compute.provider_tree [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2360.851350] env[69027]: DEBUG nova.scheduler.client.report [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2360.864395] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 
0.273s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2360.864906] env[69027]: ERROR nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2360.864906] env[69027]: Faults: ['InvalidArgument'] [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Traceback (most recent call last): [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self.driver.spawn(context, instance, image_meta, [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self._fetch_image_if_missing(context, vi) [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] image_cache(vi, tmp_image_ds_loc) [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] vm_util.copy_virtual_disk( [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] session._wait_for_task(vmdk_copy_task) [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] return self.wait_for_task(task_ref) [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] return evt.wait() [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2360.864906] env[69027]: ERROR 
nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] result = hub.switch() [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] return self.greenlet.switch() [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] self.f(*self.args, **self.kw) [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] raise exceptions.translate_fault(task_info.error) [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Faults: ['InvalidArgument'] [ 2360.864906] env[69027]: ERROR nova.compute.manager [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] [ 2360.865800] env[69027]: DEBUG nova.compute.utils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2360.866999] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Build of instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 was re-scheduled: A specified parameter was not correct: fileType [ 2360.866999] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2360.867378] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2360.867552] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2360.867723] env[69027]: DEBUG nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2360.867896] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2361.131689] env[69027]: DEBUG nova.network.neutron [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2361.142279] env[69027]: INFO nova.compute.manager [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Took 0.27 seconds to deallocate network for instance. [ 2361.153426] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.243018] env[69027]: INFO nova.scheduler.client.report [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Deleted allocations for instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 [ 2361.268387] env[69027]: DEBUG oslo_concurrency.lockutils [None req-d8d5789d-ad95-45c2-94be-c1120e5e7064 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 518.868s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2361.268755] env[69027]: DEBUG oslo_concurrency.lockutils [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 322.349s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2361.268912] env[69027]: DEBUG oslo_concurrency.lockutils [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2361.269117] env[69027]: DEBUG oslo_concurrency.lockutils [None 
req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2361.269580] env[69027]: DEBUG oslo_concurrency.lockutils [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2361.271831] env[69027]: INFO nova.compute.manager [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Terminating instance [ 2361.273717] env[69027]: DEBUG nova.compute.manager [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2361.273913] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2361.274400] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91f8748a-99f3-4203-8157-3f41989e5b2d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.286573] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7627548c-10f5-4d23-86f1-a02dcb5daa2d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.313994] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f069ae93-e79f-4c89-99b8-f3ee70895ee6 could not be found. [ 2361.314231] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2361.314413] env[69027]: INFO nova.compute.manager [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Took 0.04 seconds to destroy the instance on the hypervisor. 
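Note: the repeated "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" DEBUG lines in this stretch are emitted by the "inner" wrapper of oslo.concurrency's synchronized decorator (lockutils.py:402/407/421 in the trace above). The snippet below is only a minimal sketch of that usage pattern, assuming oslo.concurrency is installed; the function name and body are illustrative stand-ins, not Nova's actual do_terminate_instance code.

```python
from oslo_concurrency import lockutils

# Illustrative only: the lock name mirrors the per-instance UUID locks seen in
# the log; any concurrent caller decorated with the same name blocks until the
# lock is released, and the decorator's "inner" wrapper logs the
# "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines.
@lockutils.synchronized('f069ae93-e79f-4c89-99b8-f3ee70895ee6')
def do_terminate_instance():
    # Stand-in body; runs only while the per-instance lock is held.
    return 'terminated'

if __name__ == '__main__':
    print(do_terminate_instance())
```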
[ 2361.314663] env[69027]: DEBUG oslo.service.loopingcall [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2361.314878] env[69027]: DEBUG nova.compute.manager [-] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2361.314972] env[69027]: DEBUG nova.network.neutron [-] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2361.351043] env[69027]: DEBUG nova.network.neutron [-] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2361.359713] env[69027]: INFO nova.compute.manager [-] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] Took 0.04 seconds to deallocate network for instance. [ 2361.443626] env[69027]: DEBUG oslo_concurrency.lockutils [None req-826744d4-01c5-4bf3-839b-30d451f64cd6 tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2361.444473] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.352s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2361.444667] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: f069ae93-e79f-4c89-99b8-f3ee70895ee6] During sync_power_state the instance has a pending task (deleting). Skip. 
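Note: the last entry above shows the periodic power-state sync skipping instance f069ae93 because it still has a pending task ('deleting'). The following is a purely hypothetical condensation of that decision for illustration, not Nova's implementation: an instance with any task_state set is left alone because the in-flight operation owns it.

```python
# Hypothetical helper condensing the decision visible in the log entry above.
def should_sync_power_state(task_state):
    # Only instances with no pending task are candidates for power-state sync.
    return task_state is None

assert should_sync_power_state(None) is True          # idle instance: sync it
assert should_sync_power_state('deleting') is False   # pending task: "Skip."
```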
[ 2361.444845] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "f069ae93-e79f-4c89-99b8-f3ee70895ee6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2361.767015] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2361.783982] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2362.771606] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.771865] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.772239] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2363.772239] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2363.772420] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2365.780030] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2365.780308] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances with incomplete migration {{(pid=69027) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2371.782609] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.782609] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Cleaning up deleted instances {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2371.793450] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] There are 0 instances to clean {{(pid=69027) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2409.606168] env[69027]: WARNING oslo_vmware.rw_handles [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles response.begin() [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2409.606168] env[69027]: ERROR oslo_vmware.rw_handles [ 2409.606831] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Downloaded image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf 
to vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2409.608851] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Caching image {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2409.609179] env[69027]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Copying Virtual Disk [datastore2] vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk to [datastore2] vmware_temp/bf25f2f1-9c0d-4849-beb4-a494a9efba33/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk {{(pid=69027) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2409.609466] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdf8b04d-dc21-44c3-9d1f-363d2177266d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.617256] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 2409.617256] env[69027]: value = "task-3395299" [ 2409.617256] env[69027]: _type = "Task" [ 2409.617256] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2409.625586] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': task-3395299, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.129357] env[69027]: DEBUG oslo_vmware.exceptions [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Fault InvalidArgument not matched. 
{{(pid=69027) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2410.129640] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2410.130255] env[69027]: ERROR nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2410.130255] env[69027]: Faults: ['InvalidArgument'] [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Traceback (most recent call last): [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] yield resources [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self.driver.spawn(context, instance, image_meta, [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self._fetch_image_if_missing(context, vi) [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] image_cache(vi, tmp_image_ds_loc) [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] vm_util.copy_virtual_disk( [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] session._wait_for_task(vmdk_copy_task) [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] return self.wait_for_task(task_ref) [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] return evt.wait() [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] result = hub.switch() [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] return self.greenlet.switch() [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self.f(*self.args, **self.kw) [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] raise exceptions.translate_fault(task_info.error) [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Faults: ['InvalidArgument'] [ 2410.130255] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] [ 2410.131215] env[69027]: INFO nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Terminating instance [ 2410.132239] env[69027]: DEBUG oslo_concurrency.lockutils [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1f242793-8cbc-47db-8e09-30ca2e488bdf/1f242793-8cbc-47db-8e09-30ca2e488bdf.vmdk" {{(pid=69027) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2410.132452] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2410.133103] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 
tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2410.133305] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2410.133544] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0da9f0bd-53fe-4b72-aacb-631546b496ce {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.135762] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e00663b-4423-4434-ac87-9b084a3c37d0 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.142520] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Unregistering the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2410.142744] env[69027]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48de3976-3468-4744-8565-a0788d44b787 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.144821] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2410.144997] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69027) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2410.145923] env[69027]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3adbc600-fbe9-4659-a5a2-cd6795bf7f3c {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.150736] env[69027]: DEBUG oslo_vmware.api [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Waiting for the task: (returnval){ [ 2410.150736] env[69027]: value = "session[52927036-1fe8-2440-5648-6648c565a79d]52eb9680-70e2-92ad-168f-f0fff5be90ba" [ 2410.150736] env[69027]: _type = "Task" [ 2410.150736] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.159371] env[69027]: DEBUG oslo_vmware.api [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Task: {'id': session[52927036-1fe8-2440-5648-6648c565a79d]52eb9680-70e2-92ad-168f-f0fff5be90ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.208365] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Unregistered the VM {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2410.208609] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Deleting contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2410.208765] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Deleting the datastore file [datastore2] ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2410.209053] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-83cf1be1-2edc-4ae8-947a-bc31226f73b4 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.215566] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for the task: (returnval){ [ 2410.215566] env[69027]: value = "task-3395301" [ 2410.215566] env[69027]: _type = "Task" [ 2410.215566] env[69027]: } to complete. {{(pid=69027) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.222947] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': task-3395301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.661160] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Preparing fetch location {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2410.661474] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating directory with path [datastore2] vmware_temp/3724ab7a-80d2-401f-8f93-47547ef55841/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2410.661663] env[69027]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee9827b1-07d1-4332-95d6-a24545dd9d95 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.673420] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Created directory with path [datastore2] vmware_temp/3724ab7a-80d2-401f-8f93-47547ef55841/1f242793-8cbc-47db-8e09-30ca2e488bdf {{(pid=69027) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2410.673636] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Fetch image to [datastore2] vmware_temp/3724ab7a-80d2-401f-8f93-47547ef55841/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk {{(pid=69027) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2410.673788] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to [datastore2] vmware_temp/3724ab7a-80d2-401f-8f93-47547ef55841/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk on the data store datastore2 {{(pid=69027) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2410.674536] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80eb688-3d2d-4fb0-9fe1-0daff9dc8131 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.680813] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ada5fd-44e6-4294-a938-73f1ac50ebbc {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.689588] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86dc582b-b63b-4d05-bdd0-23ab780d735d {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.723051] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54111ea1-4aef-45d6-8318-5d0a98aa570d {{(pid=69027) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.729942] env[69027]: DEBUG oslo_vmware.api [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Task: {'id': task-3395301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090071} completed successfully. {{(pid=69027) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2410.731422] env[69027]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Deleted the datastore file {{(pid=69027) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2410.731614] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Deleted contents of the VM from datastore datastore2 {{(pid=69027) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2410.731792] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2410.731966] env[69027]: INFO nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2410.733974] env[69027]: DEBUG nova.compute.claims [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Aborting claim: {{(pid=69027) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2410.734169] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2410.734394] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2410.736972] env[69027]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ef729814-de8b-4305-9c98-0a5cb7db93d2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.757376] env[69027]: DEBUG nova.virt.vmwareapi.images [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Downloading image file data 1f242793-8cbc-47db-8e09-30ca2e488bdf to the data store datastore2 {{(pid=69027) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2410.793618] env[69027]: DEBUG nova.scheduler.client.report [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Refreshing inventories for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2410.807772] env[69027]: DEBUG nova.scheduler.client.report [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Updating ProviderTree inventory for provider 4923c91f-3b2b-4ad1-a821-36209acae639 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2410.807997] env[69027]: DEBUG nova.compute.provider_tree [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Updating inventory in ProviderTree for provider 4923c91f-3b2b-4ad1-a821-36209acae639 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2410.811360] env[69027]: DEBUG oslo_vmware.rw_handles [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3724ab7a-80d2-401f-8f93-47547ef55841/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69027) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2410.867616] env[69027]: DEBUG nova.scheduler.client.report [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Refreshing aggregate associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, aggregates: None {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2410.871956] env[69027]: DEBUG oslo_vmware.rw_handles [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Completed reading data from the image iterator. {{(pid=69027) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2410.872155] env[69027]: DEBUG oslo_vmware.rw_handles [None req-203a4c75-e45d-4d07-b45f-06a287624524 tempest-ServersTestJSON-2138845674 tempest-ServersTestJSON-2138845674-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3724ab7a-80d2-401f-8f93-47547ef55841/1f242793-8cbc-47db-8e09-30ca2e488bdf/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69027) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2410.886525] env[69027]: DEBUG nova.scheduler.client.report [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Refreshing trait associations for resource provider 4923c91f-3b2b-4ad1-a821-36209acae639, traits: COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=69027) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2410.970116] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9345b360-a52a-4287-abbd-e1ba8633061b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.977493] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d773bc2d-eb57-4dd2-8982-cbe7b8920d80 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.006306] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9074157d-9fd8-4f10-b477-95743d3f6d9e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.012879] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286e2f9c-4f6d-4c36-8b54-2b058a3267a6 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.025242] env[69027]: DEBUG nova.compute.provider_tree [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2411.035145] env[69027]: DEBUG nova.scheduler.client.report [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2411.047732] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.313s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.048247] env[69027]: ERROR nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] 
Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2411.048247] env[69027]: Faults: ['InvalidArgument'] [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Traceback (most recent call last): [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self.driver.spawn(context, instance, image_meta, [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self._fetch_image_if_missing(context, vi) [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] image_cache(vi, tmp_image_ds_loc) [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] vm_util.copy_virtual_disk( [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] session._wait_for_task(vmdk_copy_task) [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] return self.wait_for_task(task_ref) [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] return evt.wait() [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] result = hub.switch() [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: 
ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] return self.greenlet.switch() [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] self.f(*self.args, **self.kw) [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] raise exceptions.translate_fault(task_info.error) [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Faults: ['InvalidArgument'] [ 2411.048247] env[69027]: ERROR nova.compute.manager [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] [ 2411.049143] env[69027]: DEBUG nova.compute.utils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] VimFaultException {{(pid=69027) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2411.050307] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Build of instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea was re-scheduled: A specified parameter was not correct: fileType [ 2411.050307] env[69027]: Faults: ['InvalidArgument'] {{(pid=69027) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2411.050669] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Unplugging VIFs for instance {{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2411.050844] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69027) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2411.051030] env[69027]: DEBUG nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2411.051215] env[69027]: DEBUG nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2411.456901] env[69027]: DEBUG nova.network.neutron [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2411.468484] env[69027]: INFO nova.compute.manager [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Took 0.42 seconds to deallocate network for instance. [ 2411.574047] env[69027]: INFO nova.scheduler.client.report [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Deleted allocations for instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea [ 2411.597023] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2b60b94b-b696-416e-ac27-ade37fd009be tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 561.341s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.597023] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 365.322s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.597023] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Acquiring lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.597023] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.597023] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.598355] env[69027]: INFO nova.compute.manager [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Terminating instance [ 2411.600223] env[69027]: DEBUG nova.compute.manager [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Start destroying the instance on the hypervisor. {{(pid=69027) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2411.602569] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Destroying instance {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2411.602569] env[69027]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77ef6047-7066-4dea-ab78-4afdacdd4520 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.611232] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3764bff2-13de-4b82-a7e3-8796ce0b1831 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.637149] env[69027]: WARNING nova.virt.vmwareapi.vmops [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea could not be found. [ 2411.637366] env[69027]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Instance destroyed {{(pid=69027) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2411.637581] env[69027]: INFO nova.compute.manager [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2411.637858] env[69027]: DEBUG oslo.service.loopingcall [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69027) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2411.638099] env[69027]: DEBUG nova.compute.manager [-] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Deallocating network for instance {{(pid=69027) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2411.638199] env[69027]: DEBUG nova.network.neutron [-] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] deallocate_for_instance() {{(pid=69027) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2411.661447] env[69027]: DEBUG nova.network.neutron [-] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Updating instance_info_cache with network_info: [] {{(pid=69027) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2411.670097] env[69027]: INFO nova.compute.manager [-] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] Took 0.03 seconds to deallocate network for instance. [ 2411.754158] env[69027]: DEBUG oslo_concurrency.lockutils [None req-0cfd5052-b535-47fd-8806-e7b26c363807 tempest-ServerDiskConfigTestJSON-1018388087 tempest-ServerDiskConfigTestJSON-1018388087-project-member] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.159s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.754978] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 64.662s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.755191] env[69027]: INFO nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2411.755368] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "ff7dd864-8df9-43e5-9fa1-a3bfc47ec4ea" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.784191] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2413.784458] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Starting heal instance info cache {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2413.784582] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Rebuilding the list of instances to heal {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2413.808740] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 22e27e0c-3cac-4794-b53a-4df7b8b92ec9] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2413.808907] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: c78bd74b-1d1b-46bc-9fd8-a553f23e6671] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2413.809051] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: a1088215-4676-47d3-9df6-e835d32f4b5a] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2413.809196] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: fc594297-bbf9-4ea4-82c6-a709b4e0c3f8] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2413.809322] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] [instance: 647c0734-c3ed-47eb-807a-e8034e5378f1] Skipping network cache update for instance because it is Building. {{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2413.809443] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Didn't find any instances for network info cache update. 
{{(pid=69027) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2416.771594] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.772929] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.785309] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.785529] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.785698] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.785855] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69027) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2419.786986] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e443c1ad-ae2e-4db2-9db7-7f08d0b02245 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.795866] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6742badd-ea00-477a-8745-f59306af9fed {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.810823] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab338ef-869b-4ad9-84e4-638c06156222 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.816904] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469ebab3-0928-4b95-ac1b-bfff6948568b {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.845278] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180998MB free_disk=102GB free_vcpus=48 pci_devices=None {{(pid=69027) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2419.845409] env[69027]: DEBUG 
oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2419.845600] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2419.902638] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 22e27e0c-3cac-4794-b53a-4df7b8b92ec9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2419.902903] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance c78bd74b-1d1b-46bc-9fd8-a553f23e6671 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2419.903100] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance a1088215-4676-47d3-9df6-e835d32f4b5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2419.903304] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance fc594297-bbf9-4ea4-82c6-a709b4e0c3f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2419.903474] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Instance 647c0734-c3ed-47eb-807a-e8034e5378f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=69027) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2419.903679] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2419.903816] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69027) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2419.975659] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e64079a-1545-4924-9ec4-4a9481a30bd2 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2419.983371] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae31dd7-522a-4b66-a2c5-c1d8431bc4d8 {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.014860] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67dd424b-3983-42dc-a39e-69e376a642de {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.021543] env[69027]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523a6446-5314-46a0-b7c4-f4547b4f7d1e {{(pid=69027) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2420.034705] env[69027]: DEBUG nova.compute.provider_tree [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed in ProviderTree for provider: 4923c91f-3b2b-4ad1-a821-36209acae639 {{(pid=69027) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2420.043420] env[69027]: DEBUG nova.scheduler.client.report [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Inventory has not changed for provider 4923c91f-3b2b-4ad1-a821-36209acae639 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 102, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69027) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2420.056382] env[69027]: DEBUG nova.compute.resource_tracker [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69027) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2420.056572] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.211s {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2421.050615] env[69027]: DEBUG oslo_service.periodic_task [None 
req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.770851] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2422.771683] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2422.771952] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2423.771580] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2423.771770] env[69027]: DEBUG nova.compute.manager [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69027) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2424.771800] env[69027]: DEBUG oslo_service.periodic_task [None req-2686f5af-ee18-4f23-ac8c-5df5cd6bc244 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69027) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2430.713855] env[69027]: DEBUG oslo_concurrency.lockutils [None req-2ca415cd-1941-4a85-a6c6-6a96cd021c0b tempest-DeleteServersTestJSON-529940143 tempest-DeleteServersTestJSON-529940143-project-member] Acquiring lock "a1088215-4676-47d3-9df6-e835d32f4b5a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69027) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}